From 326d59fa8df47fbb41c0276bc1871652851f1642 Mon Sep 17 00:00:00 2001 From: "Daniel Doubrovkine (dB.)" Date: Fri, 15 Oct 2021 14:31:22 -0400 Subject: [PATCH] Added platform to manifests. (#751) Signed-off-by: dblock --- src/assemble_workflow/bundle_recorder.py | 13 ++++- src/build_workflow/build_recorder.py | 3 +- src/manifests/build_manifest.py | 19 ++++--- src/manifests/bundle_manifest.py | 36 ++++++++---- src/run_assemble.py | 2 +- src/run_integ_test.py | 22 +++++-- src/test_workflow/dependency_installer.py | 11 ++-- .../integ_test/local_test_cluster.py | 57 ++++++++++++++----- .../perf_test/perf_test_cluster.py | 52 +++++++++-------- src/test_workflow/test_args.py | 9 +++ tests/data/opensearch-build-1.1.0.yml | 3 +- .../data/opensearch-build-1.1.0.yml | 3 +- .../opensearch-dashboards-build-1.1.0.yml | 3 +- .../test_bundle_recorder.py | 55 +++++++++++------- tests/tests_assemble_workflow/test_bundles.py | 3 +- .../test_build_recorder.py | 7 ++- .../data/opensearch-build-1.1.0.yml | 3 +- .../data/opensearch-build-1.2.0.yml | 3 +- .../data/opensearch-bundle-1.1.0.yml | 3 +- tests/tests_manifests/test_build_manifest.py | 7 ++- tests/tests_manifests/test_bundle_manifest.py | 17 ++++-- .../bwc_test/data/test_manifest.yaml | 3 +- .../bwc_test/test_run_bwc_test.py | 9 +-- .../integ_test/data/build_manifest.yml | 3 +- .../build_manifest_missing_components.yml | 3 +- .../integ_test/data/bundle_manifest.yml | 3 +- .../integ_test/test_local_test_cluster.py | 54 +++++++++++++----- .../integ_test/test_run_integ_test.py | 27 +++++++-- .../perf_test/data/bundle_manifest.yaml | 3 +- tests/tests_test_workflow/test_test_args.py | 37 ++++++++++++ 30 files changed, 334 insertions(+), 139 deletions(-) diff --git a/src/assemble_workflow/bundle_recorder.py b/src/assemble_workflow/bundle_recorder.py index 5d8f1fdbeb..54175c0c47 100644 --- a/src/assemble_workflow/bundle_recorder.py +++ b/src/assemble_workflow/bundle_recorder.py @@ -23,12 +23,18 @@ def __init__(self, build, output_dir, artifacts_dir): build.id, build.name, build.version, + build.platform, build.architecture, self.__get_tar_location(), ) def __get_tar_name(self, build): - parts = [build.name.lower().replace(" ", "-"), build.version, "linux", build.architecture] + parts = [ + build.name.lower().replace(" ", "-"), + build.version, + build.platform, + build.architecture, + ] return "-".join(parts) + ".tar.gz" def __get_public_url_path(self, folder, rel_path): @@ -70,15 +76,16 @@ def write_manifest(self, folder): self.get_manifest().to_file(manifest_path) class BundleManifestBuilder: - def __init__(self, build_id, name, version, arch, location): + def __init__(self, build_id, name, version, platform, arch, location): self.data = {} self.data["build"] = {} self.data["build"]["id"] = build_id self.data["build"]["name"] = name self.data["build"]["version"] = str(version) + self.data["build"]["platform"] = platform self.data["build"]["architecture"] = arch self.data["build"]["location"] = location - self.data["schema-version"] = "1.0" + self.data["schema-version"] = "1.1" # We need to store components as a hash so that we can append artifacts by component name # When we convert to a BundleManifest this will get converted back into a list self.data["components"] = [] diff --git a/src/build_workflow/build_recorder.py b/src/build_workflow/build_recorder.py index f725bee977..cd05e35786 100644 --- a/src/build_workflow/build_recorder.py +++ b/src/build_workflow/build_recorder.py @@ -61,8 +61,9 @@ def __init__(self, target): self.data["build"]["id"] = 
target.build_id self.data["build"]["name"] = target.name self.data["build"]["version"] = target.opensearch_version + self.data["build"]["platform"] = target.platform self.data["build"]["architecture"] = target.arch - self.data["schema-version"] = "1.1" + self.data["schema-version"] = "1.2" self.components_hash = {} def append_component(self, name, version, repository_url, ref, commit_id): diff --git a/src/manifests/build_manifest.py b/src/manifests/build_manifest.py index 868969f2d2..442cc4d07c 100644 --- a/src/manifests/build_manifest.py +++ b/src/manifests/build_manifest.py @@ -14,11 +14,12 @@ The manifest contains information about the product that was built (in the `build` section), and the components that made up the build in the `components` section. -The format for schema version 1.0 is: -schema-version: "1.0" +The format for schema version 1.2 is: +schema-version: "1.2" build: name: string version: string + platform: linux or darwin architecture: x64 or arm64 components: - name: string @@ -47,13 +48,14 @@ class BuildManifest(Manifest): "required": True, "type": "dict", "schema": { + "platform": {"required": True, "type": "string"}, "architecture": {"required": True, "type": "string"}, "id": {"required": True, "type": "string"}, "name": {"required": True, "type": "string"}, "version": {"required": True, "type": "string"}, }, }, - "schema-version": {"required": True, "type": "string", "allowed": ["1.1"]}, + "schema-version": {"required": True, "type": "string", "allowed": ["1.2"]}, "components": { "type": "list", "schema": { @@ -89,7 +91,7 @@ def __init__(self, data): def __to_dict__(self): return { - "schema-version": "1.1", + "schema-version": "1.2", "build": self.build.__to_dict__(), "components": list( map(lambda component: component.__to_dict__(), self.components) @@ -109,15 +111,16 @@ def get_component(self, component_name): @staticmethod def get_build_manifest_relative_location( - build_id, opensearch_version, architecture + build_id, opensearch_version, platform, architecture ): + # TODO: use platform, https://github.com/opensearch-project/opensearch-build/issues/669 return f"builds/{opensearch_version}/{build_id}/{architecture}/manifest.yml" @staticmethod - def from_s3(bucket_name, build_id, opensearch_version, architecture, work_dir=None): + def from_s3(bucket_name, build_id, opensearch_version, platform, architecture, work_dir=None): work_dir = work_dir if not None else str(os.getcwd()) manifest_s3_path = BuildManifest.get_build_manifest_relative_location( - build_id, opensearch_version, architecture + build_id, opensearch_version, platform, architecture ) S3Bucket(bucket_name).download_file(manifest_s3_path, work_dir) build_manifest = BuildManifest.from_path("manifest.yml") @@ -131,6 +134,7 @@ class Build: def __init__(self, data): self.name = data["name"] self.version = data["version"] + self.platform = data["platform"] self.architecture = data["architecture"] self.id = data["id"] @@ -138,6 +142,7 @@ def __to_dict__(self): return { "name": self.name, "version": self.version, + "platform": self.platform, "architecture": self.architecture, "id": self.id, } diff --git a/src/manifests/bundle_manifest.py b/src/manifests/bundle_manifest.py index ed5beffff9..ca5c1b627b 100644 --- a/src/manifests/bundle_manifest.py +++ b/src/manifests/bundle_manifest.py @@ -16,11 +16,12 @@ class BundleManifest(Manifest): The manifest contains information about the bundle that was built (in the `assemble` section), and the components that made up the bundle in the `components` section. 
- The format for schema version 1.0 is: - schema-version: "1.0" + The format for schema version 1.1 is: + schema-version: "1.1" build: name: string version: string + platform: linux or darwin architecture: x64 or arm64 location: /relative/path/to/tarball components: @@ -36,6 +37,7 @@ class BundleManifest(Manifest): "required": True, "type": "dict", "schema": { + "platform": {"required": True, "type": "string"}, "architecture": {"required": True, "type": "string"}, "id": {"required": True, "type": "string"}, "location": {"required": True, "type": "string"}, @@ -43,7 +45,7 @@ class BundleManifest(Manifest): "version": {"required": True, "type": "string"}, }, }, - "schema-version": {"required": True, "type": "string", "allowed": ["1.0"]}, + "schema-version": {"required": True, "type": "string", "allowed": ["1.1"]}, "components": { "required": True, "type": "list", @@ -69,7 +71,7 @@ def __init__(self, data): def __to_dict__(self): return { - "schema-version": "1.0", + "schema-version": "1.1", "build": self.build.__to_dict__(), "components": list( map(lambda component: component.__to_dict__(), self.components) @@ -77,34 +79,43 @@ def __to_dict__(self): } @staticmethod - def from_s3(bucket_name, build_id, opensearch_version, architecture, work_dir=None): + def from_s3( + bucket_name, build_id, opensearch_version, platform, architecture, work_dir=None + ): work_dir = work_dir if not None else str(os.getcwd()) manifest_s3_path = BundleManifest.get_bundle_manifest_relative_location( - build_id, opensearch_version, architecture + build_id, opensearch_version, platform, architecture ) S3Bucket(bucket_name).download_file(manifest_s3_path, work_dir) - bundle_manifest = BundleManifest.from_path(os.path.join(work_dir, 'manifest.yml')) + bundle_manifest = BundleManifest.from_path( + os.path.join(work_dir, "manifest.yml") + ) os.remove(os.path.realpath(os.path.join(work_dir, "manifest.yml"))) return bundle_manifest @staticmethod - def get_tarball_relative_location(build_id, opensearch_version, architecture): - return f"bundles/{opensearch_version}/{build_id}/{architecture}/opensearch-{opensearch_version}-linux-{architecture}.tar.gz" + def get_tarball_relative_location( + build_id, opensearch_version, platform, architecture + ): + # TODO: use platform, https://github.com/opensearch-project/opensearch-build/issues/669 + return f"bundles/{opensearch_version}/{build_id}/{architecture}/opensearch-{opensearch_version}-{platform}-{architecture}.tar.gz" @staticmethod - def get_tarball_name(opensearch_version, architecture): - return f"opensearch-{opensearch_version}-linux-{architecture}.tar.gz" + def get_tarball_name(opensearch_version, platform, architecture): + return f"opensearch-{opensearch_version}-{platform}-{architecture}.tar.gz" @staticmethod def get_bundle_manifest_relative_location( - build_id, opensearch_version, architecture + build_id, opensearch_version, platform, architecture ): + # TODO: use platform, https://github.com/opensearch-project/opensearch-build/issues/669 return f"bundles/{opensearch_version}/{build_id}/{architecture}/manifest.yml" class Build: def __init__(self, data): self.name = data["name"] self.version = data["version"] + self.platform = data["platform"] self.architecture = data["architecture"] self.location = data["location"] self.id = data["id"] @@ -113,6 +124,7 @@ def __to_dict__(self): return { "name": self.name, "version": self.version, + "platform": self.platform, "architecture": self.architecture, "location": self.location, "id": self.id, diff --git a/src/run_assemble.py 
b/src/run_assemble.py index 458fe1aa85..0a322bb28c 100755 --- a/src/run_assemble.py +++ b/src/run_assemble.py @@ -55,7 +55,7 @@ def main(): with tempfile.TemporaryDirectory() as work_dir: logging.info( - f"Bundling {build.name} ({build.architecture}) into {output_dir} ..." + f"Bundling {build.name} ({build.architecture}) on {build.platform} into {output_dir} ..." ) os.chdir(work_dir) diff --git a/src/run_integ_test.py b/src/run_integ_test.py index 6734e759b6..507b3cff38 100755 --- a/src/run_integ_test.py +++ b/src/run_integ_test.py @@ -34,7 +34,9 @@ def pull_build_repo(work_dir): def main(): args = TestArgs() console.configure(level=args.logging_level) - test_manifest_path = os.path.join(os.path.dirname(__file__), 'test_workflow/config/test_manifest.yml') + test_manifest_path = os.path.join( + os.path.dirname(__file__), "test_workflow/config/test_manifest.yml" + ) test_manifest = TestManifest.from_path(test_manifest_path) integ_test_config = dict() for component in test_manifest.components: @@ -45,9 +47,21 @@ def main(): test_recorder = TestRecorder(args.test_run_id, "integ-test", work_dir) os.chdir(work_dir) bundle_manifest = BundleManifest.from_s3( - args.s3_bucket, args.build_id, args.opensearch_version, args.architecture, work_dir) + args.s3_bucket, + args.build_id, + args.opensearch_version, + args.platform, + args.architecture, + work_dir, + ) build_manifest = BuildManifest.from_s3( - args.s3_bucket, args.build_id, args.opensearch_version, args.architecture, work_dir) + args.s3_bucket, + args.build_id, + args.opensearch_version, + args.platform, + args.architecture, + work_dir, + ) pull_build_repo(work_dir) DependencyInstaller(build_manifest.build).install_all_maven_dependencies() all_results = TestSuiteResults() @@ -60,7 +74,7 @@ def main(): build_manifest, work_dir, args.s3_bucket, - test_recorder + test_recorder, ) test_results = test_suite.execute() all_results.append(component.name, test_results) diff --git a/src/test_workflow/dependency_installer.py b/src/test_workflow/dependency_installer.py index 6d348d0fec..6a669eae8b 100644 --- a/src/test_workflow/dependency_installer.py +++ b/src/test_workflow/dependency_installer.py @@ -20,14 +20,11 @@ class DependencyInstaller: def __init__(self, build): self.build_id = build.id self.version = build.version - self.arch = build.architecture + self.platform = build.platform + self.architecture = build.architecture self.s3_bucket = S3Bucket(self.ARTIFACT_S3_BUCKET) - self.s3_maven_location = ( - f"builds/{self.version}/{self.build_id}/{self.arch}/maven/org/opensearch" - ) - self.s3_build_location = ( - f"builds/{self.version}/{self.build_id}/{self.arch}/plugins" - ) + self.s3_maven_location = f"builds/{self.version}/{self.build_id}/{self.platform}/{self.architecture}/maven/org/opensearch" + self.s3_build_location = f"builds/{self.version}/{self.build_id}/{self.platform}/{self.architecture}/plugins" self.maven_local_path = os.path.join( os.path.expanduser("~"), ".m2/repository/org/opensearch/" ) diff --git a/src/test_workflow/integ_test/local_test_cluster.py b/src/test_workflow/integ_test/local_test_cluster.py index cf6e0229c8..5275ec5e0f 100644 --- a/src/test_workflow/integ_test/local_test_cluster.py +++ b/src/test_workflow/integ_test/local_test_cluster.py @@ -26,10 +26,17 @@ class LocalTestCluster(TestCluster): Represents an on-box test cluster. This class downloads a bundle (from a BundleManifest) and runs it as a background process. 
""" - def __init__(self, work_dir, component_name, additional_cluster_config, bundle_manifest, security_enabled, - component_test_config, - test_recorder: TestRecorder, - s3_bucket_name=None): + def __init__( + self, + work_dir, + component_name, + additional_cluster_config, + bundle_manifest, + security_enabled, + component_test_config, + test_recorder: TestRecorder, + s3_bucket_name=None, + ): self.manifest = bundle_manifest self.work_dir = os.path.join(work_dir, "local-test-cluster") os.makedirs(self.work_dir, exist_ok=True) @@ -49,8 +56,10 @@ def create_cluster(self): if not self.security_enabled: self.disable_security(self.install_dir) if self.additional_cluster_config is not None: - self.__add_plugin_specific_config(self.additional_cluster_config, - os.path.join(self.install_dir, "config", "opensearch.yml")) + self.__add_plugin_specific_config( + self.additional_cluster_config, + os.path.join(self.install_dir, "config", "opensearch.yml"), + ) self.process = subprocess.Popen( "./opensearch-tar-install.sh", cwd=self.install_dir, @@ -73,8 +82,14 @@ def destroy(self): return self.terminate_process() log_files = walk(os.path.join(self.work_dir, self.install_dir, "logs")) - test_result_data = TestResultData(self.component_name, self.component_test_config, self.return_code, - self.local_cluster_stdout, self.local_cluster_stderr, log_files) + test_result_data = TestResultData( + self.component_name, + self.component_test_config, + self.return_code, + self.local_cluster_stdout, + self.local_cluster_stderr, + log_files, + ) self.save_logs.save_test_result_data(test_result_data) def url(self, path=""): @@ -82,16 +97,24 @@ def url(self, path=""): def __download_tarball_from_s3(self): s3_path = BundleManifest.get_tarball_relative_location( - self.manifest.build.id, self.manifest.build.version, self.manifest.build.architecture) + self.manifest.build.id, + self.manifest.build.version, + self.manifest.build.platform, + self.manifest.build.architecture, + ) S3Bucket(self.bucket_name).download_file(s3_path, self.work_dir) - return BundleManifest.get_tarball_name(self.manifest.build.version, self.manifest.build.architecture) + return BundleManifest.get_tarball_name( + self.manifest.build.version, + self.manifest.build.platform, + self.manifest.build.architecture, + ) def download(self): logging.info(f"Creating local test cluster in {self.work_dir}") os.chdir(self.work_dir) logging.info("Downloading bundle from s3") bundle_name = self.__download_tarball_from_s3() - logging.info(f'Downloaded bundle to {os.path.realpath(bundle_name)}') + logging.info(f"Downloaded bundle to {os.path.realpath(bundle_name)}") logging.info("Unpacking") subprocess.check_call(f"tar -xzf {bundle_name}", shell=True) logging.info("Unpacked") @@ -115,7 +138,9 @@ def wait_for_service(self): logging.info(f"Pinging {url} attempt {attempt}") response = requests.get(url, verify=False, auth=("admin", "admin")) logging.info(f"{response.status_code}: {response.text}") - if response.status_code == 200 and ('"status":"green"' or '"status":"yellow"' in response.text): + if response.status_code == 200 and ( + '"status":"green"' or '"status":"yellow"' in response.text + ): logging.info("Service is available") return except requests.exceptions.ConnectionError: @@ -148,9 +173,13 @@ def terminate_process(self): raise finally: logging.info(f"Process terminated with exit code {self.process.returncode}") - with open(os.path.join(os.path.realpath(self.work_dir), self.stdout.name), "r") as stdout: + with open( + 
os.path.join(os.path.realpath(self.work_dir), self.stdout.name), "r" + ) as stdout: self.local_cluster_stdout = stdout.read() - with open(os.path.join(os.path.realpath(self.work_dir), self.stderr.name), "r") as stderr: + with open( + os.path.join(os.path.realpath(self.work_dir), self.stderr.name), "r" + ) as stderr: self.local_cluster_stderr = stderr.read() self.return_code = self.process.returncode self.stdout.close() diff --git a/src/test_workflow/perf_test/perf_test_cluster.py b/src/test_workflow/perf_test/perf_test_cluster.py index 7e85922976..58f6f391a1 100644 --- a/src/test_workflow/perf_test/perf_test_cluster.py +++ b/src/test_workflow/perf_test/perf_test_cluster.py @@ -10,54 +10,60 @@ class PerfTestCluster(TestCluster): """ Represents a performance test cluster. This class deploys the opensearch bundle with CDK and returns the private IP. """ - def __init__(self, bundle_manifest, config, stack_name, security, current_workspace): + + def __init__( + self, bundle_manifest, config, stack_name, security, current_workspace + ): self.manifest = bundle_manifest - self.work_dir = 'opensearch-cluster/cdk/single-node/' + self.work_dir = "opensearch-cluster/cdk/single-node/" self.current_workspace = current_workspace self.stack_name = stack_name self.cluster_endpoint = None self.cluster_port = None - self.output_file = 'output.json' + self.output_file = "output.json" self.ip_address = None - self.security = 'enable' if security else 'disable' - role = config['Constants']['Role'] + self.security = "enable" if security else "disable" + role = config["Constants"]["Role"] params_dict = { - 'url': self.manifest.build.location, - 'security_group_id': config['Constants']['SecurityGroupId'], - 'vpc_id': config['Constants']['VpcId'], - 'account_id': config['Constants']['AccountId'], - 'region': config['Constants']['Region'], - 'stack_name': self.stack_name, - 'security': self.security, - 'architecture': self.manifest.build.architecture, + "url": self.manifest.build.location, + "security_group_id": config["Constants"]["SecurityGroupId"], + "vpc_id": config["Constants"]["VpcId"], + "account_id": config["Constants"]["AccountId"], + "region": config["Constants"]["Region"], + "stack_name": self.stack_name, + "security": self.security, + "platform": self.manifest.build.platform, + "architecture": self.manifest.build.architecture, } params_list = [] for key, value in params_dict.items(): - params_list.append(f' -c {key}={value}') - role_params = f' --require-approval=never --plugin cdk-assume-role-credential-plugin'\ - f' -c assume-role-credentials:writeIamRoleName={role} -c assume-role-credentials:readIamRoleName={role} ' - self.params = ''.join(params_list) + role_params + params_list.append(f" -c {key}={value}") + role_params = ( + f" --require-approval=never --plugin cdk-assume-role-credential-plugin" + f" -c assume-role-credentials:writeIamRoleName={role} -c assume-role-credentials:readIamRoleName={role} " + ) + self.params = "".join(params_list) + role_params def create_cluster(self): os.chdir(self.work_dir) - command = f'cdk deploy {self.params} --outputs-file {self.output_file}' + command = f"cdk deploy {self.params} --outputs-file {self.output_file}" logging.info(f'Executing "{command}" in {os.getcwd()}') subprocess.check_call(command, cwd=os.getcwd(), shell=True) - with open(self.output_file, 'r') as read_file: + with open(self.output_file, "r") as read_file: load_output = json.load(read_file) - self.ip_address = load_output[self.stack_name]['PrivateIp'] - logging.info('Private IP:', 
self.ip_address) + self.ip_address = load_output[self.stack_name]["PrivateIp"] + logging.info(f"Private IP: {self.ip_address}") def endpoint(self): self.cluster_endpoint = self.ip_address return self.cluster_endpoint def port(self): - self.cluster_port = 443 if self.security == 'enable' else 9200 + self.cluster_port = 443 if self.security == "enable" else 9200 return self.cluster_port def destroy(self): os.chdir(os.path.join(self.current_workspace, self.work_dir)) - command = f'cdk destroy {self.params} --force' + command = f"cdk destroy {self.params} --force" logging.info(f'Executing "{command}" in {os.getcwd()}') subprocess.check_call(command, cwd=os.getcwd(), shell=True) diff --git a/src/test_workflow/test_args.py b/src/test_workflow/test_args.py index 2b26121ae9..3b709240a6 100644 --- a/src/test_workflow/test_args.py +++ b/src/test_workflow/test_args.py @@ -23,6 +23,7 @@ def __call__(self, parser, namespace, values, option_string=None): s3_bucket: str opensearch_version: str build_id: int + platform: str architecture: str test_run_id: int component: str @@ -47,6 +48,13 @@ def __init__(self): help="The build id for the built artifact", required=True, ) + parser.add_argument( + "--platform", + type=str, + choices=["linux", "darwin"], + help="The OS name, e.g. linux or darwin", + required=True, + ) parser.add_argument( "--architecture", type=str, @@ -80,6 +88,7 @@ def __init__(self): self.s3_bucket = args.s3_bucket self.opensearch_version = args.opensearch_version self.build_id = args.build_id + self.platform = args.platform self.architecture = args.architecture self.test_run_id = args.test_run_id self.component = args.component diff --git a/tests/data/opensearch-build-1.1.0.yml b/tests/data/opensearch-build-1.1.0.yml index f3817d60b1..744c7b97da 100644 --- a/tests/data/opensearch-build-1.1.0.yml +++ b/tests/data/opensearch-build-1.1.0.yml @@ -1,4 +1,5 @@ build: + platform: linux architecture: x64 id: c3ff7a232d25403fa8cc14c97799c323 name: OpenSearch @@ -2480,4 +2481,4 @@ components: ref: main repository: https://github.com/opensearch-project/dashboards-notebooks.git version: 1.1.0.0 -schema-version: '1.1' +schema-version: '1.2' diff --git a/tests/tests_assemble_workflow/data/opensearch-build-1.1.0.yml b/tests/tests_assemble_workflow/data/opensearch-build-1.1.0.yml index f3817d60b1..744c7b97da 100644 --- a/tests/tests_assemble_workflow/data/opensearch-build-1.1.0.yml +++ b/tests/tests_assemble_workflow/data/opensearch-build-1.1.0.yml @@ -1,4 +1,5 @@ build: + platform: linux architecture: x64 id: c3ff7a232d25403fa8cc14c97799c323 name: OpenSearch @@ -2480,4 +2481,4 @@ components: ref: main repository: https://github.com/opensearch-project/dashboards-notebooks.git version: 1.1.0.0 -schema-version: '1.1' +schema-version: '1.2' diff --git a/tests/tests_assemble_workflow/data/opensearch-dashboards-build-1.1.0.yml b/tests/tests_assemble_workflow/data/opensearch-dashboards-build-1.1.0.yml index 03119fae29..89c2fe711f 100644 --- a/tests/tests_assemble_workflow/data/opensearch-dashboards-build-1.1.0.yml +++ b/tests/tests_assemble_workflow/data/opensearch-dashboards-build-1.1.0.yml @@ -1,4 +1,5 @@ build: + platform: linux architecture: x64 id: c94ebec444a94ada86a230c9297b1d73 name: OpenSearch Dashboards @@ -20,4 +21,4 @@ components: ref: main repository: https://github.com/opensearch-project/alerting-dashboards-plugin version: 1.1.0.0 -schema-version: '1.1' +schema-version: '1.2' diff --git a/tests/tests_assemble_workflow/test_bundle_recorder.py b/tests/tests_assemble_workflow/test_bundle_recorder.py 
index 63194aa3f0..225b865f4e 100644 --- a/tests/tests_assemble_workflow/test_bundle_recorder.py +++ b/tests/tests_assemble_workflow/test_bundle_recorder.py @@ -43,6 +43,7 @@ def test_record_component(self): self.bundle_recorder.get_manifest().to_dict(), { "build": { + "platform": "linux", "architecture": "x64", "id": "c3ff7a232d25403fa8cc14c97799c323", "location": "output_dir/opensearch-1.1.0-linux-x64.tar.gz", @@ -58,7 +59,7 @@ def test_record_component(self): "repository": "https://github.com/opensearch-project/job_scheduler", } ], - "schema-version": "1.0", + "schema-version": "1.1", }, ) @@ -69,13 +70,14 @@ def test_get_manifest(self): manifest.to_dict(), { "build": { + "platform": "linux", "architecture": "x64", "id": "c3ff7a232d25403fa8cc14c97799c323", "location": "output_dir/opensearch-1.1.0-linux-x64.tar.gz", "name": "OpenSearch", "version": "1.1.0", }, - "schema-version": "1.0", + "schema-version": "1.1", }, ) @@ -89,7 +91,7 @@ def test_write_manifest(self): self.assertEqual(yaml.safe_load(f), data) def test_record_component_public(self): - self.bundle_recorder.public_url = 'https://ci.opensearch.org/ci/os-distro-prod' + self.bundle_recorder.public_url = "https://ci.opensearch.org/ci/os-distro-prod" component = BuildManifest.Component( { "name": "job_scheduler", @@ -106,6 +108,7 @@ def test_record_component_public(self): self.bundle_recorder.get_manifest().to_dict(), { "build": { + "platform": "linux", "architecture": "x64", "id": "c3ff7a232d25403fa8cc14c97799c323", "location": "output_dir/opensearch-1.1.0-linux-x64.tar.gz", @@ -121,32 +124,36 @@ def test_record_component_public(self): "repository": "https://github.com/opensearch-project/job_scheduler", } ], - "schema-version": "1.0", + "schema-version": "1.1", }, ) def test_get_location_scenarios(self): def get_location(public_url): self.bundle_recorder.public_url = public_url - return self.bundle_recorder._BundleRecorder__get_location("builds", "dir1/dir2/file", "/tmp/builds/foo/dir1/dir2/file") + return self.bundle_recorder._BundleRecorder__get_location( + "builds", "dir1/dir2/file", "/tmp/builds/foo/dir1/dir2/file" + ) # No public URL - Fallback to ABS Path self.assertEqual(get_location(None), "/tmp/builds/foo/dir1/dir2/file") # Public URL - No trailing slash self.assertEqual( - get_location('https://ci.opensearch.org/ci/os-distro-prod'), - "https://ci.opensearch.org/ci/os-distro-prod/builds/1.1.0/c3ff7a232d25403fa8cc14c97799c323/x64/dir1/dir2/file" + get_location("https://ci.opensearch.org/ci/os-distro-prod"), + "https://ci.opensearch.org/ci/os-distro-prod/builds/1.1.0/c3ff7a232d25403fa8cc14c97799c323/x64/dir1/dir2/file", ) # Public URL - Trailing slash self.assertEqual( - get_location('https://ci.opensearch.org/ci/os-distro-prod/'), - "https://ci.opensearch.org/ci/os-distro-prod/builds/1.1.0/c3ff7a232d25403fa8cc14c97799c323/x64/dir1/dir2/file" + get_location("https://ci.opensearch.org/ci/os-distro-prod/"), + "https://ci.opensearch.org/ci/os-distro-prod/builds/1.1.0/c3ff7a232d25403fa8cc14c97799c323/x64/dir1/dir2/file", ) def test_tar_name(self): - self.assertEqual(self.bundle_recorder.tar_name, "opensearch-1.1.0-linux-x64.tar.gz") + self.assertEqual( + self.bundle_recorder.tar_name, "opensearch-1.1.0-linux-x64.tar.gz" + ) class TestBundleRecorderDashboards(unittest.TestCase): @@ -176,6 +183,7 @@ def test_record_component(self): self.bundle_recorder.get_manifest().to_dict(), { "build": { + "platform": "linux", "architecture": "x64", "id": "c94ebec444a94ada86a230c9297b1d73", "location": 
"output_dir/opensearch-dashboards-1.1.0-linux-x64.tar.gz", @@ -191,7 +199,7 @@ def test_record_component(self): "repository": "https://github.com/opensearch-project/alerting-dashboards-plugin", } ], - "schema-version": "1.0", + "schema-version": "1.1", }, ) @@ -202,13 +210,14 @@ def test_get_manifest(self): manifest.to_dict(), { "build": { + "platform": "linux", "architecture": "x64", "id": "c94ebec444a94ada86a230c9297b1d73", "location": "output_dir/opensearch-dashboards-1.1.0-linux-x64.tar.gz", "name": "OpenSearch Dashboards", "version": "1.1.0", }, - "schema-version": "1.0", + "schema-version": "1.1", }, ) @@ -222,7 +231,7 @@ def test_write_manifest(self): self.assertEqual(yaml.safe_load(f), data) def test_record_component_public(self): - self.bundle_recorder.public_url = 'https://ci.opensearch.org/ci/os-distro-prod' + self.bundle_recorder.public_url = "https://ci.opensearch.org/ci/os-distro-prod" component = BuildManifest.Component( { "name": "alertingDashboards", @@ -239,6 +248,7 @@ def test_record_component_public(self): self.bundle_recorder.get_manifest().to_dict(), { "build": { + "platform": "linux", "architecture": "x64", "id": "c94ebec444a94ada86a230c9297b1d73", "location": "output_dir/opensearch-dashboards-1.1.0-linux-x64.tar.gz", @@ -254,29 +264,34 @@ def test_record_component_public(self): "repository": "https://github.com/opensearch-project/alerting-dashboards-plugin", } ], - "schema-version": "1.0", + "schema-version": "1.1", }, ) def test_get_location_scenarios(self): def get_location(public_url): self.bundle_recorder.public_url = public_url - return self.bundle_recorder._BundleRecorder__get_location("builds", "dir1/dir2/file", "/tmp/builds/foo/dir1/dir2/file") + return self.bundle_recorder._BundleRecorder__get_location( + "builds", "dir1/dir2/file", "/tmp/builds/foo/dir1/dir2/file" + ) # No public URL - Fallback to ABS Path self.assertEqual(get_location(None), "/tmp/builds/foo/dir1/dir2/file") # Public URL - No trailing slash self.assertEqual( - get_location('https://ci.opensearch.org/ci/os-distro-prod'), - "https://ci.opensearch.org/ci/os-distro-prod/builds/1.1.0/c94ebec444a94ada86a230c9297b1d73/x64/dir1/dir2/file" + get_location("https://ci.opensearch.org/ci/os-distro-prod"), + "https://ci.opensearch.org/ci/os-distro-prod/builds/1.1.0/c94ebec444a94ada86a230c9297b1d73/x64/dir1/dir2/file", ) # Public URL - Trailing slash self.assertEqual( - get_location('https://ci.opensearch.org/ci/os-distro-prod/'), - "https://ci.opensearch.org/ci/os-distro-prod/builds/1.1.0/c94ebec444a94ada86a230c9297b1d73/x64/dir1/dir2/file" + get_location("https://ci.opensearch.org/ci/os-distro-prod/"), + "https://ci.opensearch.org/ci/os-distro-prod/builds/1.1.0/c94ebec444a94ada86a230c9297b1d73/x64/dir1/dir2/file", ) def test_tar_name(self): - self.assertEqual(self.bundle_recorder.tar_name, "opensearch-dashboards-1.1.0-linux-x64.tar.gz") + self.assertEqual( + self.bundle_recorder.tar_name, + "opensearch-dashboards-1.1.0-linux-x64.tar.gz", + ) diff --git a/tests/tests_assemble_workflow/test_bundles.py b/tests/tests_assemble_workflow/test_bundles.py index 4d897183ba..f77ceb1041 100644 --- a/tests/tests_assemble_workflow/test_bundles.py +++ b/tests/tests_assemble_workflow/test_bundles.py @@ -39,9 +39,10 @@ def test_bundle_opensearch_dashboards(self): def test_bundle_opensearch_invalid(self): manifest = BuildManifest( { - "schema-version": "1.1", + "schema-version": "1.2", "build": { "name": "invalid", + "platform": "linux", "architecture": "x86", "id": "id", "version": "1.0.0", diff --git 
a/tests/tests_build_workflow/test_build_recorder.py b/tests/tests_build_workflow/test_build_recorder.py index fff6e29e40..c0e3071dd5 100644 --- a/tests/tests_build_workflow/test_build_recorder.py +++ b/tests/tests_build_workflow/test_build_recorder.py @@ -28,6 +28,7 @@ def __mock(self, snapshot=True): output_dir="output_dir", name="OpenSearch", version="1.1.0", + platform="linux", arch="x64", snapshot=snapshot, ) @@ -55,6 +56,7 @@ def test_record_component_and_artifact(self, mock_makedirs, mock_copyfile): recorder.get_manifest().to_dict(), { "build": { + "platform": "linux", "architecture": "x64", "id": "1", "name": "OpenSearch", @@ -70,7 +72,7 @@ def test_record_component_and_artifact(self, mock_makedirs, mock_copyfile): "version": "1.1.0.0", } ], - "schema-version": "1.1", + "schema-version": "1.2", }, ) @@ -129,12 +131,13 @@ def test_get_manifest(self): manifest.to_dict(), { "build": { + "platform": "linux", "architecture": "x64", "id": "1", "name": "OpenSearch", "version": "1.1.0", }, - "schema-version": "1.1", + "schema-version": "1.2", }, ) diff --git a/tests/tests_manifests/data/opensearch-build-1.1.0.yml b/tests/tests_manifests/data/opensearch-build-1.1.0.yml index f3817d60b1..744c7b97da 100644 --- a/tests/tests_manifests/data/opensearch-build-1.1.0.yml +++ b/tests/tests_manifests/data/opensearch-build-1.1.0.yml @@ -1,4 +1,5 @@ build: + platform: linux architecture: x64 id: c3ff7a232d25403fa8cc14c97799c323 name: OpenSearch @@ -2480,4 +2481,4 @@ components: ref: main repository: https://github.com/opensearch-project/dashboards-notebooks.git version: 1.1.0.0 -schema-version: '1.1' +schema-version: '1.2' diff --git a/tests/tests_manifests/data/opensearch-build-1.2.0.yml b/tests/tests_manifests/data/opensearch-build-1.2.0.yml index be2881affa..a550d829d5 100644 --- a/tests/tests_manifests/data/opensearch-build-1.2.0.yml +++ b/tests/tests_manifests/data/opensearch-build-1.2.0.yml @@ -1,4 +1,5 @@ build: + platform: linux architecture: x64 id: c3ff7a232d25403fa8cc14c97799c323 name: OpenSearch @@ -2480,4 +2481,4 @@ components: ref: main repository: https://github.com/opensearch-project/dashboards-notebooks.git version: 1.2.0.0 -schema-version: '1.1' +schema-version: '1.2' diff --git a/tests/tests_manifests/data/opensearch-bundle-1.1.0.yml b/tests/tests_manifests/data/opensearch-bundle-1.1.0.yml index ca6885a52b..6ac129ee01 100644 --- a/tests/tests_manifests/data/opensearch-bundle-1.1.0.yml +++ b/tests/tests_manifests/data/opensearch-bundle-1.1.0.yml @@ -1,4 +1,5 @@ build: + platform: linux architecture: x64 id: c3ff7a232d25403fa8cc14c97799c323 location: bundle/opensearch-1.1.0-linux-x64.tar.gz @@ -70,4 +71,4 @@ components: name: dashboards-notebooks ref: main repository: https://github.com/opensearch-project/dashboards-notebooks.git -schema-version: '1.0' +schema-version: '1.1' diff --git a/tests/tests_manifests/test_build_manifest.py b/tests/tests_manifests/test_build_manifest.py index b539a0fcff..ba701c97c6 100644 --- a/tests/tests_manifests/test_build_manifest.py +++ b/tests/tests_manifests/test_build_manifest.py @@ -24,7 +24,7 @@ def setUp(self): self.manifest = BuildManifest.from_path(self.manifest_filename) def test_build(self): - self.assertEqual(self.manifest.version, "1.1") + self.assertEqual(self.manifest.version, "1.2") self.assertEqual(self.manifest.build.name, "OpenSearch") self.assertEqual(self.manifest.build.version, "1.1.0") self.assertEqual(len(self.manifest.components), 15) @@ -52,8 +52,9 @@ def test_to_dict(self): def test_get_manifest_relative_location(self): actual = 
BuildManifest.get_build_manifest_relative_location( - "25", "1.1.0", "x64" + "25", "1.1.0", "linux", "x64" ) + # TODO: use platform, https://github.com/opensearch-project/opensearch-build/issues/669 expected = "builds/1.1.0/25/x64/manifest.yml" self.assertEqual( actual, expected, "the manifest relative location is not as expected" @@ -76,12 +77,14 @@ def test_from_s3(self, mock_s3_bucket, *mocks): s3_download_path = BuildManifest.get_build_manifest_relative_location( self.manifest.build.id, self.manifest.build.version, + self.manifest.build.platform, self.manifest.build.architecture, ) BuildManifest.from_s3( "bucket_name", self.manifest.build.id, self.manifest.build.version, + self.manifest.build.platform, self.manifest.build.architecture, "/xyz", ) diff --git a/tests/tests_manifests/test_bundle_manifest.py b/tests/tests_manifests/test_bundle_manifest.py index fa50b0840f..9636d5bb08 100644 --- a/tests/tests_manifests/test_bundle_manifest.py +++ b/tests/tests_manifests/test_bundle_manifest.py @@ -24,12 +24,13 @@ def setUp(self): self.manifest = BundleManifest.from_path(self.manifest_filename) def test_build(self): - self.assertEqual(self.manifest.version, "1.0") + self.assertEqual(self.manifest.version, "1.1") self.assertEqual(self.manifest.build.name, "OpenSearch") self.assertEqual(self.manifest.build.version, "1.1.0") self.assertEqual( self.manifest.build.location, "bundle/opensearch-1.1.0-linux-x64.tar.gz" ) + self.assertEqual(self.manifest.build.platform, "linux") self.assertEqual(self.manifest.build.architecture, "x64") self.assertEqual(len(self.manifest.components), 13) @@ -57,23 +58,25 @@ def test_to_dict(self): def test_get_manifest_relative_location(self): actual = BundleManifest.get_bundle_manifest_relative_location( - "25", "1.1.0", "x64" + "25", "1.1.0", "linux", "x64" ) + # TODO: use platform, https://github.com/opensearch-project/opensearch-build/issues/669 expected = "bundles/1.1.0/25/x64/manifest.yml" self.assertEqual( actual, expected, "the manifest relative location is not as expected" ) def test_get_tarball_relative_location(self): - actual = BundleManifest.get_tarball_relative_location("25", "1.1.0", "x64") - expected = "bundles/1.1.0/25/x64/opensearch-1.1.0-linux-x64.tar.gz" + actual = BundleManifest.get_tarball_relative_location("25", "1.1.0", "darwin", "x64") + # TODO: use platform, https://github.com/opensearch-project/opensearch-build/issues/669 + expected = "bundles/1.1.0/25/x64/opensearch-1.1.0-darwin-x64.tar.gz" self.assertEqual( actual, expected, "the tarball relative location is not as expected" ) def test_get_tarball_name(self): - actual = BundleManifest.get_tarball_name("1.1.0", "x64") - expected = "opensearch-1.1.0-linux-x64.tar.gz" + actual = BundleManifest.get_tarball_name("1.1.0", "darwin", "x64") + expected = "opensearch-1.1.0-darwin-x64.tar.gz" self.assertEqual(actual, expected, "the tarball name is not as expected") @patch("os.remove") @@ -85,12 +88,14 @@ def test_from_s3(self, mock_s3_bucket, *mocks): s3_download_path = BundleManifest.get_bundle_manifest_relative_location( self.manifest.build.id, self.manifest.build.version, + self.manifest.build.platform, self.manifest.build.architecture, ) BundleManifest.from_s3( "bucket_name", self.manifest.build.id, self.manifest.build.version, + self.manifest.build.platform, self.manifest.build.architecture, "/xyz", ) diff --git a/tests/tests_test_workflow/test_bwc_workflow/bwc_test/data/test_manifest.yaml b/tests/tests_test_workflow/test_bwc_workflow/bwc_test/data/test_manifest.yaml index 6e6ffad5bb..1d282fb27c 
100644 --- a/tests/tests_test_workflow/test_bwc_workflow/bwc_test/data/test_manifest.yaml +++ b/tests/tests_test_workflow/test_bwc_workflow/bwc_test/data/test_manifest.yaml @@ -1,4 +1,5 @@ build: + platform: linux architecture: x64 id: 41d5ae25183d4e699e92debfbe3f83bd location: https://artifacts.opensearch.org/bundles/1.0.0/41d5ae25183d4e699e92debfbe3f83bd/opensearch-1.0.0-linux-x64.tar.gz @@ -60,4 +61,4 @@ components: name: dashboards-notebooks ref: "1.0.0.0" repository: https://github.com/opensearch-project/dashboards-notebooks.git -schema-version: '1.0' \ No newline at end of file +schema-version: '1.1' \ No newline at end of file diff --git a/tests/tests_test_workflow/test_bwc_workflow/bwc_test/test_run_bwc_test.py b/tests/tests_test_workflow/test_bwc_workflow/bwc_test/test_run_bwc_test.py index 64f4cec5ea..82b86a3021 100644 --- a/tests/tests_test_workflow/test_bwc_workflow/bwc_test/test_run_bwc_test.py +++ b/tests/tests_test_workflow/test_bwc_workflow/bwc_test/test_run_bwc_test.py @@ -15,13 +15,14 @@ class TestRunBwcTest(unittest.TestCase): @contextmanager def __mock_args(self): with patch("run_bwc_test.TestArgs") as mock_test_args: - mock_test_args.s3_bucket = 's3bucket' - mock_test_args.architecture = 'x64' - mock_test_args.opensearch_version = '1.1.0' + mock_test_args.s3_bucket = "s3bucket" + mock_test_args.platform = "linux" + mock_test_args.architecture = "x64" + mock_test_args.opensearch_version = "1.1.0" mock_test_args.build_id = 100 mock_test_args.test_run_id = 1 mock_test_args.keep = False - mock_test_args.logging_level = 'INFO' + mock_test_args.logging_level = "INFO" yield mock_test_args @patch("run_bwc_test.console") diff --git a/tests/tests_test_workflow/test_integ_workflow/integ_test/data/build_manifest.yml b/tests/tests_test_workflow/test_integ_workflow/integ_test/data/build_manifest.yml index 8978941b5b..bc885600a1 100644 --- a/tests/tests_test_workflow/test_integ_workflow/integ_test/data/build_manifest.yml +++ b/tests/tests_test_workflow/test_integ_workflow/integ_test/data/build_manifest.yml @@ -1,4 +1,5 @@ build: + platform: linux architecture: x64 id: c3ff7a232d25403fa8cc14c97799c323 name: OpenSearch @@ -2480,4 +2481,4 @@ components: ref: main repository: https://github.com/opensearch-project/dashboards-notebooks.git version: 1.1.0.0 -schema-version: '1.1' +schema-version: '1.2' diff --git a/tests/tests_test_workflow/test_integ_workflow/integ_test/data/build_manifest_missing_components.yml b/tests/tests_test_workflow/test_integ_workflow/integ_test/data/build_manifest_missing_components.yml index f08783169d..46ddba2dbe 100644 --- a/tests/tests_test_workflow/test_integ_workflow/integ_test/data/build_manifest_missing_components.yml +++ b/tests/tests_test_workflow/test_integ_workflow/integ_test/data/build_manifest_missing_components.yml @@ -1,4 +1,5 @@ build: + platform: linux architecture: x64 id: c3ff7a232d25403fa8cc14c97799c323 name: OpenSearch @@ -2460,4 +2461,4 @@ components: ref: main repository: https://github.com/opensearch-project/dashboards-notebooks.git version: 1.1.0.0 -schema-version: '1.1' +schema-version: '1.2' diff --git a/tests/tests_test_workflow/test_integ_workflow/integ_test/data/bundle_manifest.yml b/tests/tests_test_workflow/test_integ_workflow/integ_test/data/bundle_manifest.yml index 77cc223b32..907d3e299a 100644 --- a/tests/tests_test_workflow/test_integ_workflow/integ_test/data/bundle_manifest.yml +++ b/tests/tests_test_workflow/test_integ_workflow/integ_test/data/bundle_manifest.yml @@ -1,4 +1,5 @@ build: + platform: linux architecture: x64 id: 
c3ff7a232d25403fa8cc14c97799c323 location: bundle/opensearch-1.1.0-linux-x64.tar.gz @@ -70,4 +71,4 @@ components: name: dashboards-notebooks ref: main repository: https://github.com/opensearch-project/dashboards-notebooks.git -schema-version: '1.0' +schema-version: '1.1' diff --git a/tests/tests_test_workflow/test_integ_workflow/integ_test/test_local_test_cluster.py b/tests/tests_test_workflow/test_integ_workflow/integ_test/test_local_test_cluster.py index c2ea16b5fe..2185d117f4 100644 --- a/tests/tests_test_workflow/test_integ_workflow/integ_test/test_local_test_cluster.py +++ b/tests/tests_test_workflow/test_integ_workflow/integ_test/test_local_test_cluster.py @@ -41,7 +41,7 @@ def setUp(self, mock_test_recorder): True, "with-security", mock_test_recorder, - "dummy-bucket" + "dummy-bucket", ) def tearDown(self): @@ -114,11 +114,14 @@ def test_download(self, mock_s3_bucket, *mocks): s3_path = BundleManifest.get_tarball_relative_location( self.manifest.build.id, self.manifest.build.version, + self.manifest.build.platform, self.manifest.build.architecture, ) work_dir_path = os.path.join(self.work_dir.name, "local-test-cluster") bundle_name = BundleManifest.get_tarball_name( - self.manifest.build.version, self.manifest.build.architecture + self.manifest.build.version, + self.manifest.build.platform, + self.manifest.build.architecture, ) self.local_test_cluster.download() os.chdir.assert_called_once_with(work_dir_path) @@ -148,7 +151,7 @@ def test_wait_for_service_cluster_unavailable(self, mock_test_recorder, *mocks): False, "without-security", mock_test_recorder, - "dummy-bucket" + "dummy-bucket", ) with self.assertRaises(ClusterCreationException) as err: local_test_cluster.wait_for_service() @@ -158,17 +161,27 @@ def test_wait_for_service_cluster_unavailable(self, mock_test_recorder, *mocks): auth=("admin", "admin"), ) self.assertEqual( - str(err.exception), - "Cluster is not available after 10 attempts", + str(err.exception), "Cluster is not available after 10 attempts" ) - @patch("test_workflow.integ_test.local_test_cluster.psutil.Process", side_effect=__mock_process) + @patch( + "test_workflow.integ_test.local_test_cluster.psutil.Process", + side_effect=__mock_process, + ) @patch("test_workflow.integ_test.local_test_cluster.subprocess.Popen.wait") @patch("test_workflow.integ_test.local_test_cluster.subprocess.Popen.terminate") - @patch("test_workflow.integ_test.local_test_cluster.logging", return_value=MagicMock()) - def test_terminate_process(self, mock_logging, mock_terminate, mock_wait, mock_process): - self.local_test_cluster.stdout = tempfile.NamedTemporaryFile(dir=self.local_test_cluster.work_dir) - self.local_test_cluster.stderr = tempfile.NamedTemporaryFile(dir=self.local_test_cluster.work_dir) + @patch( + "test_workflow.integ_test.local_test_cluster.logging", return_value=MagicMock() + ) + def test_terminate_process( + self, mock_logging, mock_terminate, mock_wait, mock_process + ): + self.local_test_cluster.stdout = tempfile.NamedTemporaryFile( + dir=self.local_test_cluster.work_dir + ) + self.local_test_cluster.stderr = tempfile.NamedTemporaryFile( + dir=self.local_test_cluster.work_dir + ) self.local_test_cluster.process = self.process self.local_test_cluster.terminate_process() mock_process.assert_called_once_with(self.process.pid) @@ -183,13 +196,24 @@ def test_terminate_process(self, mock_logging, mock_terminate, mock_wait, mock_p ) mock_logging.debug.assert_has_calls([call("Checking for child processes")]) - 
@patch("test_workflow.integ_test.local_test_cluster.psutil.Process", side_effect=__mock_process) + @patch( + "test_workflow.integ_test.local_test_cluster.psutil.Process", + side_effect=__mock_process, + ) @patch("test_workflow.integ_test.local_test_cluster.subprocess.Popen.wait") @patch("test_workflow.integ_test.local_test_cluster.subprocess.Popen.terminate") - @patch("test_workflow.integ_test.local_test_cluster.logging", return_value=MagicMock()) - def test_terminate_process_timeout(self, mock_logging, mock_terminate, mock_wait, mock_process): - self.local_test_cluster.stdout = tempfile.NamedTemporaryFile(dir=self.local_test_cluster.work_dir) - self.local_test_cluster.stderr = tempfile.NamedTemporaryFile(dir=self.local_test_cluster.work_dir) + @patch( + "test_workflow.integ_test.local_test_cluster.logging", return_value=MagicMock() + ) + def test_terminate_process_timeout( + self, mock_logging, mock_terminate, mock_wait, mock_process + ): + self.local_test_cluster.stdout = tempfile.NamedTemporaryFile( + dir=self.local_test_cluster.work_dir + ) + self.local_test_cluster.stderr = tempfile.NamedTemporaryFile( + dir=self.local_test_cluster.work_dir + ) mock_wait.side_effect = subprocess.TimeoutExpired(cmd="pass", timeout=1) with self.assertRaises(subprocess.TimeoutExpired): self.local_test_cluster.process = self.process diff --git a/tests/tests_test_workflow/test_integ_workflow/integ_test/test_run_integ_test.py b/tests/tests_test_workflow/test_integ_workflow/integ_test/test_run_integ_test.py index cfbea23029..f85f3b5e18 100644 --- a/tests/tests_test_workflow/test_integ_workflow/integ_test/test_run_integ_test.py +++ b/tests/tests_test_workflow/test_integ_workflow/integ_test/test_run_integ_test.py @@ -21,13 +21,14 @@ def setUp(self): @contextmanager def __mock_args(self): with patch("run_integ_test.TestArgs") as mock_test_args: - mock_test_args.s3_bucket = 's3bucket' - mock_test_args.architecture = 'x64' - mock_test_args.opensearch_version = '1.1.0' + mock_test_args.s3_bucket = "s3bucket" + mock_test_args.platform = "linux" + mock_test_args.architecture = "x64" + mock_test_args.opensearch_version = "1.1.0" mock_test_args.build_id = 100 mock_test_args.test_run_id = 1 mock_test_args.keep = False - mock_test_args.logging_level = 'INFO' + mock_test_args.logging_level = "INFO" yield mock_test_args @patch("run_integ_test.console") @@ -38,7 +39,14 @@ def __mock_args(self): @patch.object(BundleManifest, "from_s3") @patch.object(BuildManifest, "from_s3") @patch("run_integ_test.IntegTestSuite") - def test_run_integ_test(self, mock_integ_test_suite, mock_build_from_s3, mock_bundle_from_s3, mock_results, *mock): + def test_run_integ_test( + self, + mock_integ_test_suite, + mock_build_from_s3, + mock_bundle_from_s3, + mock_results, + *mock + ): """ test_manifest.yml has 8 plugin components listed for integration tests. This test ensures all get executed as part of integration test job. @@ -61,7 +69,14 @@ def test_run_integ_test(self, mock_integ_test_suite, mock_build_from_s3, mock_bu @patch.object(BundleManifest, "from_s3") @patch.object(BuildManifest, "from_s3") @patch("run_integ_test.IntegTestSuite") - def test_run_integ_test_failure(self, mock_integ_test_suite, mock_build_from_s3, mock_bundle_from_s3, mock_results, *mock): + def test_run_integ_test_failure( + self, + mock_integ_test_suite, + mock_build_from_s3, + mock_bundle_from_s3, + mock_results, + *mock + ): """ test_manifest.yml has 8 plugin components listed for integration tests. This test ensures all get executed as part of integration test job. 
diff --git a/tests/tests_test_workflow/test_perf_workflow/perf_test/data/bundle_manifest.yaml b/tests/tests_test_workflow/test_perf_workflow/perf_test/data/bundle_manifest.yaml index 2ae8888d77..c3203b70be 100644 --- a/tests/tests_test_workflow/test_perf_workflow/perf_test/data/bundle_manifest.yaml +++ b/tests/tests_test_workflow/test_perf_workflow/perf_test/data/bundle_manifest.yaml @@ -1,4 +1,5 @@ build: + platform: linux architecture: x64 id: 41d5ae25183d4e699e92debfbe3f83bd location: https://artifacts.opensearch.org/bundles/1.0.0/41d5ae25183d4e699e92debfbe3f83bd/opensearch-1.0.0-linux-x64.tar.gz @@ -60,4 +61,4 @@ components: name: dashboards-notebooks ref: 1.0.0.0 repository: https://github.com/opensearch-project/dashboards-notebooks.git -schema-version: '1.0' +schema-version: '1.1' diff --git a/tests/tests_test_workflow/test_test_args.py b/tests/tests_test_workflow/test_test_args.py index 246b66b2d5..447f25d14b 100644 --- a/tests/tests_test_workflow/test_test_args.py +++ b/tests/tests_test_workflow/test_test_args.py @@ -22,6 +22,8 @@ class TestTestArgs(unittest.TestCase): "1.1.0", "--build-id", "30", + "--platform", + "linux", "--architecture", "x64", "--test-run-id", @@ -32,6 +34,7 @@ def test_required_arguments(self): self.assertEqual(TestArgs().s3_bucket, "xyz") self.assertEqual(TestArgs().opensearch_version, "1.1.0") self.assertEqual(TestArgs().build_id, 30) + self.assertEqual(TestArgs().platform, "linux") self.assertEqual(TestArgs().architecture, "x64") self.assertEqual(TestArgs().test_run_id, 6) @@ -45,6 +48,8 @@ def test_required_arguments(self): "1.1.0", "--build-id", "30", + "--platform", + "linux", "--architecture", "xyz", "--test-run-id", @@ -55,6 +60,28 @@ def test_invalid_architecture(self): with self.assertRaises(SystemExit): self.assertEqual(TestArgs().architecture, "invalid") + @patch( + "argparse._sys.argv", + [ + ARGS_PY, + "--s3-bucket", + "xyz", + "--opensearch-version", + "1.1.0", + "--build-id", + "30", + "--platform", + "xyz", + "--architecture", + "x64", + "--test-run-id", + "6", + ], + ) + def test_invalid_platform(self): + with self.assertRaises(SystemExit): + self.assertEqual(TestArgs().platform, "invalid") + @patch( "argparse._sys.argv", [ @@ -65,6 +92,8 @@ def test_invalid_architecture(self): "1111", "--build-id", "30", + "--platform", + "linux", "--architecture", "x64", "--test-run-id", @@ -86,6 +115,8 @@ def test_invalid_version(self): "1.1.0", "--build-id", "30", + "--platform", + "linux", "--architecture", "x64", "--test-run-id", @@ -105,6 +136,8 @@ def test_keep_default(self): "1.1.0", "--build-id", "30", + "--platform", + "linux", "--architecture", "x64", "--test-run-id", @@ -125,6 +158,8 @@ def test_keep_true(self): "1.1.0", "--build-id", "30", + "--platform", + "linux", "--architecture", "x64", "--test-run-id", @@ -144,6 +179,8 @@ def test_verbose_default(self): "1.1.0", "--build-id", "30", + "--platform", + "linux", "--architecture", "x64", "--test-run-id",