From 7aff123997502af885138260dd1b7037160fc42c Mon Sep 17 00:00:00 2001 From: Pallab Pain Date: Tue, 6 Dec 2022 15:42:24 +0530 Subject: [PATCH] feat(managedservice): add support for rapyuta.io `managedservices` This commit introduces support for rapyuta.io managed services. Usage: rio managedservice [OPTIONS] COMMAND [ARGS]... Managed Services on rapyuta.io With managed services on rapyuta.io, you can provision services like elasticsearch, etc. on-demand and use them with your deployments. Options: --help Show this message and exit. Commands: inspect Inspect a managedservice instance list List all the managedservice instances providers List available managedservice providers You can create and delete managed services with the rio apply command and it currently doesn't have a direct CLI option. Reviewed By: Ankit Gadiya --- jsonschema/deployment-schema.yaml | 22 ++++ jsonschema/managedservice-schema.yaml | 60 ++++++++++ riocli/apply/__init__.py | 14 ++- riocli/apply/manifests/deployment.yaml | 4 + riocli/apply/manifests/managedservice.yaml | 9 ++ riocli/apply/parse.py | 129 +++++++++++--------- riocli/apply/resolver.py | 33 ++++-- riocli/apply/util.py | 14 ++- riocli/auth/staging.py | 5 +- riocli/bootstrap.py | 2 + riocli/deployment/model.py | 14 ++- riocli/deployment/validation.py | 50 +++++++- riocli/disk/util.py | 10 +- riocli/managedservice/__init__.py | 40 +++++++ riocli/managedservice/inspect.py | 35 ++++++ riocli/managedservice/list.py | 50 ++++++++ riocli/managedservice/list_providers.py | 45 +++++++ riocli/managedservice/model.py | 58 +++++++++ riocli/managedservice/util.py | 107 +++++++++++++++++ riocli/managedservice/validation.py | 130 +++++++++++++++++++++ riocli/model/base.py | 46 +++++--- 21 files changed, 774 insertions(+), 103 deletions(-) create mode 100644 jsonschema/managedservice-schema.yaml create mode 100644 riocli/apply/manifests/managedservice.yaml create mode 100644 riocli/managedservice/__init__.py create mode 100644 
riocli/managedservice/inspect.py create mode 100644 riocli/managedservice/list.py create mode 100644 riocli/managedservice/list_providers.py create mode 100644 riocli/managedservice/model.py create mode 100644 riocli/managedservice/util.py create mode 100644 riocli/managedservice/validation.py diff --git a/jsonschema/deployment-schema.yaml b/jsonschema/deployment-schema.yaml index 223b7ff3..fcf4fbda 100644 --- a/jsonschema/deployment-schema.yaml +++ b/jsonschema/deployment-schema.yaml @@ -105,6 +105,12 @@ definitions: nameOrGUID: type: string + managedServiceSpec: + type: object + properties: + depends: + "$ref": "#/definitions/managedServiceDepends" + componentSpec: properties: runtime: @@ -176,6 +182,11 @@ definitions: items: "$ref": "#/definitions/cloudNetworkAttachSpec" + managedServices: + type: array + items: + "$ref": "#/definitions/managedServiceSpec" + stringMap: type: object additionalProperties: @@ -273,3 +284,14 @@ definitions: type: string guid: type: string + managedServiceDepends: + properties: + kind: + const: managedservice + default: managedservice + nameOrGUID: + type: string + guid: + type: string + + diff --git a/jsonschema/managedservice-schema.yaml b/jsonschema/managedservice-schema.yaml new file mode 100644 index 00000000..65ce5438 --- /dev/null +++ b/jsonschema/managedservice-schema.yaml @@ -0,0 +1,60 @@ +--- +$schema: "http://json-schema.org/draft-07/schema#" +title: ManagedService +$ref: "#/definitions/managedservice" +definitions: + managedservice: + type: object + properties: + apiVersion: + const: apiextensions.rapyuta.io/v1 + default: apiextensions.rapyuta.io/v1 + kind: + const: ManagedService + default: ManagedService + metadata: + "$ref": "#/definitions/metadata" + spec: + "$ref": "#/definitions/managedserviceSpec" + required: + - apiVersion + - kind + - metadata + - spec + metadata: + type: object + properties: + name: + type: string + guid: + "$ref": "#/definitions/uuid" + creator: + "$ref": "#/definitions/uuid" + project: + 
"$ref": "#/definitions/projectGUID" + labels: + "$ref": "#/definitions/stringMap" + required: + - name + projectGUID: + type: string + pattern: "^project-[a-z]{24}$" + stringMap: + type: object + additionalProperties: + type: string + uuid: + type: string + pattern: "^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$" + managedserviceSpec: + type: object + properties: + provider: + type: string + enum: + - elasticsearch + config: + type: object + required: + - provider + - config diff --git a/riocli/apply/__init__.py b/riocli/apply/__init__.py index 2bad685b..69fc1b4b 100644 --- a/riocli/apply/__init__.py +++ b/riocli/apply/__init__.py @@ -20,6 +20,7 @@ from riocli.apply.util import process_files_values_secrets from riocli.apply.explain import explain + @click.command( 'apply', cls=HelpColorsCommand, @@ -35,20 +36,20 @@ def apply(values: str, secrets: str, files: Iterable[str], dryrun: bool = False, """ Apply resource manifests """ - glob_files, abs_values, abs_secrets = process_files_values_secrets(files, values, secrets) + glob_files, abs_values, abs_secrets = process_files_values_secrets( + files, values, secrets) if len(glob_files) == 0: click.secho('no files specified', fg='red') raise SystemExit(1) - + click.secho("----- Files Processed ----", fg="yellow") for file in glob_files: click.secho(file, fg="yellow") - - + rc = Applier(glob_files, abs_values, abs_secrets) rc.parse_dependencies() - + rc.apply(dryrun=dryrun, workers=workers) @@ -66,7 +67,8 @@ def delete(values: str, secrets: str, files: Iterable[str], dryrun: bool = False """ Apply resource manifests """ - glob_files, abs_values, abs_secrets = process_files_values_secrets(files, values, secrets) + glob_files, abs_values, abs_secrets = process_files_values_secrets( + files, values, secrets) if len(glob_files) == 0: click.secho('no files specified', fg='red') diff --git a/riocli/apply/manifests/deployment.yaml b/riocli/apply/manifests/deployment.yaml index ef64850a..063d2fc7 
100644 --- a/riocli/apply/manifests/deployment.yaml +++ b/riocli/apply/manifests/deployment.yaml @@ -40,3 +40,7 @@ spec: - depends: kind: network nameOrGUID: "routed" + managedServices: + - depends: + kind: managedservice + nameOrGUID: "test-service" diff --git a/riocli/apply/manifests/managedservice.yaml b/riocli/apply/manifests/managedservice.yaml new file mode 100644 index 00000000..3bb8cecb --- /dev/null +++ b/riocli/apply/manifests/managedservice.yaml @@ -0,0 +1,9 @@ +apiVersion: "apiextensions.rapyuta.io/v1" +kind: ManagedService +metadata: + name: "elastic-test" + labels: + creator: riocli +spec: + provider: elasticsearch + config: {} \ No newline at end of file diff --git a/riocli/apply/parse.py b/riocli/apply/parse.py index e1460a9d..e09da9db 100644 --- a/riocli/apply/parse.py +++ b/riocli/apply/parse.py @@ -11,9 +11,9 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-import os import copy import json +import os import queue import threading import typing @@ -29,9 +29,10 @@ from riocli.config import Configuration from riocli.utils import run_bash from riocli.utils.mermaid import mermaid_link, mermaid_safe - + + class Applier(object): - DEFAULT_MAX_WORKERS=6 + DEFAULT_MAX_WORKERS = 6 EXPECTED_TIME = { "organization": 3, @@ -65,29 +66,31 @@ def __init__(self, files: typing.List, values, secrets): self.environment = jinja2.Environment() if values: - self.values = self._load_file_content(values, is_value=True, is_secret=False)[0] - - if secrets: - self.secrets = self._load_file_content(secrets, is_value=True, is_secret=True)[0] + self.values = self._load_file_content( + values, is_value=True, is_secret=False)[0] + if secrets: + self.secrets = self._load_file_content( + secrets, is_value=True, is_secret=True)[0] self._process_file_list(files) - - #Public Functions + + # Public Functions def order(self): return self.graph.static_order() def apply(self, *args, **kwargs): - kwargs['workers'] = int(kwargs.get('workers') or self.DEFAULT_MAX_WORKERS) - if kwargs['workers'] == 1 : + kwargs['workers'] = int(kwargs.get('workers') + or self.DEFAULT_MAX_WORKERS) + if kwargs['workers'] == 1: return self.apply_sync(*args, **kwargs) - else: - return self.apply_async(*args, **kwargs) + + return self.apply_async(*args, **kwargs) def apply_async(self, *args, **kwargs): WORKERS = int(kwargs.get('workers') or self.DEFAULT_MAX_WORKERS) task_queue = queue.Queue() - done_queue = queue.Queue() + done_queue = queue.Queue() def worker(): while True: @@ -97,27 +100,27 @@ def worker(): try: self._apply_manifest(obj, *args, **kwargs) except Exception as ex: - click.secho('[Err] Object "{}" apply failed. Apply will not progress further.'.format(obj, str(ex))) + click.secho( + '[Err] Object "{}" apply failed. 
Apply will not progress further.'.format(obj, str(ex))) raise ex task_queue.task_done() done_queue.put(obj) - - + worker_list = [] for worker_id in range(0, WORKERS): worker_list.append(threading.Thread(target=worker, daemon=True)) worker_list[worker_id].start() - + self.graph.prepare() while self.graph.is_active(): for obj in self.graph.get_ready(): # if obj in self.resolved_objects and 'manifest' in self.resolved_objects[obj]: task_queue.put(obj) - + done_obj = done_queue.get() self.graph.done(done_obj) - + task_queue.join() def apply_sync(self, *args, **kwargs): @@ -128,14 +131,13 @@ def apply_sync(self, *args, **kwargs): self._apply_manifest(obj, *args, **kwargs) self.graph.done(obj) - def delete(self, *args, **kwargs): delete_order = list(self.graph.static_order()) delete_order.reverse() for obj in delete_order: if obj in self.resolved_objects and 'manifest' in self.resolved_objects[obj]: self._delete_manifest(obj, *args, **kwargs) - + def parse_dependencies(self, check_missing=True, delete=False): number_of_objects = 0 for f, data in self.files.items(): @@ -149,17 +151,19 @@ def parse_dependencies(self, check_missing=True, delete=False): total_time = 0 for node in copy.deepcopy(self.graph).static_order(): - + action = 'CREATE' if not self.resolved_objects[node]['src'] == 'remote' else 'UPDATE' if delete: action = 'DELETE' kind = node.split(":")[0] - expected_time = round(self.EXPECTED_TIME.get(kind.lower(), 5)/60, 2) + expected_time = round( + self.EXPECTED_TIME.get(kind.lower(), 5)/60, 2) total_time = total_time + expected_time resource_list.append([node, action, expected_time]) - self._display_context(total_time=total_time, total_objects=number_of_objects, resource_list=resource_list) - + self._display_context( + total_time=total_time, total_objects=number_of_objects, resource_list=resource_list) + if check_missing: missing_resources = [] for key, item in self.resolved_objects.items(): @@ -167,12 +171,12 @@ def parse_dependencies(self, check_missing=True, 
delete=False): missing_resources.append(key) if missing_resources: - click.secho("missing resources found in yaml. " + \ - "Plese ensure the following are either available in your yaml" + \ + click.secho("missing resources found in yaml. " + + "Plese ensure the following are either available in your yaml" + "or created on the server. {}".format(set(missing_resources)), fg="red") raise SystemExit(1) - #Manifest Operations via base.py + # Manifest Operations via base.py def _apply_manifest(self, obj_key, *args, **kwargs): obj = self.objects[obj_key] cls = ResolverCache.get_model(obj) @@ -187,8 +191,8 @@ def _delete_manifest(self, obj_key, *args, **kwargs): setattr(ist, 'rc', ResolverCache(self.client)) ist.delete(self.client, obj, *args, **kwargs) + # File Loading Operations - #File Loading Operations def _process_file_list(self, files): for f in files: data = self._load_file_content(f) @@ -213,9 +217,9 @@ def _load_file_content(self, file_name, is_value=False, is_secret=False): data = opened.read() else: data = run_bash('sops -d {}'.format(file_name)) - + # TODO: If no Kind in yaml/json, then skip - if not (is_value or is_secret): + if not (is_value or is_secret): if self.environment or file_name.endswith('.j2'): template = self.environment.from_string(data) template_args = self.values @@ -224,9 +228,10 @@ def _load_file_content(self, file_name, is_value=False, is_secret=False): try: data = template.render(**template_args) except Exception as ex: - click.secho('{} yaml parsing error. Msg: {}'.format(file_name, str(ex))) + click.secho('{} yaml parsing error. Msg: {}'.format( + file_name, str(ex))) raise ex - + file_name = file_name.rstrip('.j2') loaded_data = [] @@ -236,16 +241,18 @@ def _load_file_content(self, file_name, is_value=False, is_secret=False): loaded = json.loads(data) loaded_data.append(loaded) except json.JSONDecodeError as ex: - ex_message = '{} yaml parsing error. Msg: {}'.format(file_name, str(ex)) + ex_message = '{} yaml parsing error. 
Msg: {}'.format( + file_name, str(ex)) raise Exception(ex_message) elif file_name.endswith('yaml') or file_name.endswith('yml'): try: loaded = yaml.safe_load_all(data) loaded_data = list(loaded) - + except yaml.YAMLError as e: - ex_message = '{} yaml parsing error. Msg: {}'.format(file_name, str(e)) + ex_message = '{} yaml parsing error. Msg: {}'.format( + file_name, str(e)) raise Exception(ex_message) if not loaded_data: @@ -253,17 +260,18 @@ def _load_file_content(self, file_name, is_value=False, is_secret=False): return loaded_data + # Graph Operations - #Graph Operations def _add_graph_node(self, key): self.graph.add(key) - self.diagram.append('\t{}[{}]'.format( mermaid_safe(key), key)) + self.diagram.append('\t{}[{}]'.format(mermaid_safe(key), key)) def _add_graph_edge(self, dependent_key, key): self.graph.add(dependent_key, key) - self.diagram.append('\t{}[{}] --> {}[{}] '.format( mermaid_safe(key), key, mermaid_safe(dependent_key), dependent_key)) + self.diagram.append('\t{}[{}] --> {}[{}] '.format(mermaid_safe(key), + key, mermaid_safe(dependent_key), dependent_key)) - #Dependency Resolution + # Dependency Resolution def _parse_dependency(self, dependent_key, model): for key, value in model.items(): if key == "depends": @@ -299,33 +307,37 @@ def _resolve_dependency(self, dependent_key, dependency): obj_name = self._get_attr(obj, ResolverCache.NAME_KEYS) if kind == 'package': - if (guid and obj_guid == guid): - self._add_remote_object_to_resolve_tree(dependent_key, obj_guid, dependency, obj) + if guid and obj_guid == guid: + self._add_remote_object_to_resolve_tree( + dependent_key, obj_guid, dependency, obj) if (name_or_guid == obj_name) and ( 'version' in dependency and obj['packageVersion'] == dependency.get('version')): - self._add_remote_object_to_resolve_tree(dependent_key, obj_guid, dependency, obj) + self._add_remote_object_to_resolve_tree( + dependent_key, obj_guid, dependency, obj) # Special handling for Static route since it doesn't have a name 
field. # StaticRoute sends a URLPrefix field with name being the prefix along with short org guid. elif kind == 'staticroute' and name_or_guid in obj_name: - self._add_remote_object_to_resolve_tree(dependent_key, obj_guid, dependency, obj) + self._add_remote_object_to_resolve_tree( + dependent_key, obj_guid, dependency, obj) elif (guid and obj_guid == guid) or (name_or_guid == obj_name): - self._add_remote_object_to_resolve_tree(dependent_key, obj_guid, dependency, obj) + self._add_remote_object_to_resolve_tree( + dependent_key, obj_guid, dependency, obj) self.dependencies[kind][name_or_guid] = {'local': True} self._add_graph_edge(dependent_key, key) if key not in self.resolved_objects: self.resolved_objects[key] = {'src': 'missing'} - + def _add_remote_object_to_resolve_tree(self, dependent_key, guid, dependency, obj): kind = dependency.get('kind') name_or_guid = dependency.get('nameOrGUID') key = '{}:{}'.format(kind, name_or_guid) - - self.dependencies[kind][name_or_guid] = {'guid': guid, 'raw': obj, 'local': False} + self.dependencies[kind][name_or_guid] = { + 'guid': guid, 'raw': obj, 'local': False} if key not in self.resolved_objects: self.resolved_objects[key] = {} self.resolved_objects[key]['guid'] = guid @@ -333,7 +345,7 @@ def _add_remote_object_to_resolve_tree(self, dependent_key, guid, dependency, ob self.resolved_objects[key]['src'] = 'remote' self._add_graph_edge(dependent_key, key) - + dependency['guid'] = guid if kind.lower() == "disk": dependency['depGuid'] = obj['internalDeploymentGUID'] @@ -345,10 +357,10 @@ def _initialize_kind_dependency(self, kind): if not self.dependencies.get(kind): self.dependencies[kind] = {} - #Utils + # Utils def _display_context(self, total_time: int, total_objects: int, resource_list: typing.List) -> None: # Display context - + if os.environ.get('MERMAID'): diagram_link = mermaid_link("\n".join(self.diagram)) click.launch(diagram_link) @@ -359,7 +371,8 @@ def _display_context(self, total_time: int, total_objects: int, 
resource_list: t ['Files', len(self.files)], ['Resources', total_objects], ] - click.echo(tabulate(context, headers=headers, tablefmt='simple', numalign='center')) + click.echo(tabulate(context, headers=headers, + tablefmt='simple', numalign='center')) # Display Resource Inventory headers = [] @@ -368,14 +381,22 @@ def _display_context(self, total_time: int, total_objects: int, resource_list: t col, _ = get_terminal_size() click.secho(" " * col, bg='blue') - click.echo(tabulate(resource_list, headers=headers, tablefmt='simple', numalign='center')) + click.echo(tabulate(resource_list, headers=headers, + tablefmt='simple', numalign='center')) click.secho(" " * col, bg='blue') @staticmethod def _get_attr(obj, accept_keys): + metadata = None + + if hasattr(obj, 'metadata'): + metadata = getattr(obj, 'metadata') + for key in accept_keys: if hasattr(obj, key): return getattr(obj, key) + if metadata is not None and hasattr(metadata, key): + return getattr(metadata, key) raise Exception('guid resolve failed') diff --git a/riocli/apply/resolver.py b/riocli/apply/resolver.py index 83bd712a..8fb648c9 100644 --- a/riocli/apply/resolver.py +++ b/riocli/apply/resolver.py @@ -25,6 +25,7 @@ from riocli.deployment.model import Deployment from riocli.device.model import Device from riocli.disk.model import Disk +from riocli.managedservice.model import ManagedService from riocli.network.model import Network from riocli.package.model import Package from riocli.project.model import Project @@ -37,7 +38,8 @@ class _Singleton(type): def __call__(cls, *args, **kwargs): if cls not in cls._instances: - cls._instances[cls] = super(_Singleton, cls).__call__(*args, **kwargs) + cls._instances[cls] = super( + _Singleton, cls).__call__(*args, **kwargs) return cls._instances[cls] @@ -52,6 +54,7 @@ class ResolverCache(object, metaclass=_Singleton): 'Package': Package, 'Disk': Disk, 'Deployment': Deployment, + "ManagedService": ManagedService } KIND_REGEX = { @@ -66,6 +69,7 @@ class 
ResolverCache(object, metaclass=_Singleton): "network": "^net-[a-z]{24}$", "device": "^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$", "user": "^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$", + "managedservice": "^[a-zA-Z][a-zA-Z0-9_-]$" } GUID_KEYS = ['guid', 'GUID', 'uuid', 'ID', 'Id', 'id'] @@ -90,12 +94,12 @@ def find_guid(self, name, kind, *args): def find_depends(self, depends, *args): if 'depGuid' in depends and depends['kind'] == 'disk': return depends['depGuid'], None - elif 'guid' in depends and depends['kind'] != 'network': + elif 'guid' in depends and depends['kind'] not in ('network', 'managedservice'): return depends['guid'], None - elif 'nameOrGUID' in depends: obj_list = self._list_functors(depends['kind'])() - obj_match = list(self._find_functors(depends['kind'])(depends['nameOrGUID'], obj_list, *args)) + obj_match = list(self._find_functors(depends['kind'])( + depends['nameOrGUID'], obj_list, *args)) if not obj_list or (isinstance(obj_list, list) and len(obj_list) == 0): return None, None if obj_match and isinstance(obj_match, list) and len(obj_match) > 0: @@ -112,9 +116,11 @@ def _guid_functor(self, kind): "staticroute": lambda x: munchify(x)['guid'], "build": lambda x: munchify(x)['guid'], "deployment": lambda x: munchify(x)['deploymentId'], - "network": lambda x: munchify(x).guid, - "disk": lambda x: munchify(x)['internalDeploymentGUID'], #This is only temporarity like this - "device": lambda x: munchify(x)['uuid'] + "network": lambda x: munchify(x)['guid'], + # This is only temporarily like this + "disk": lambda x: munchify(x)['internalDeploymentGUID'], + "device": lambda x: munchify(x)['uuid'], + "managedservice": lambda x: munchify(x)['metadata']['name'], } return mapping[kind] @@ -131,6 +137,7 @@ def _list_functors(self, kind): "network": self._list_networks, "disk": self._list_disks, "device": self.client.get_all_devices, + "managedservice": self._list_managedservices, } return 
mapping[kind] @@ -146,6 +153,7 @@ def _find_functors(self, kind): "network": self._generate_find_guid_functor(), "disk": self._generate_find_guid_functor(), "device": self._generate_find_guid_functor(), + "managedservice": lambda name, instances: filter(lambda i: i.metadata.name == name, instances), } return mapping[kind] @@ -163,20 +171,27 @@ def _list_networks(self): if routed: networks.extend(routed) + return networks def _list_disks(self): config = Configuration() - catalog_host = config.data.get('catalog_host', 'https://gacatalog.apps.rapyuta.io') + catalog_host = config.data.get( + 'catalog_host', 'https://gacatalog.apps.rapyuta.io') url = '{}/disk'.format(catalog_host) headers = config.get_auth_header() - response = RestClient(url).method(HttpMethod.GET).headers(headers).execute() + response = RestClient(url).method( + HttpMethod.GET).headers(headers).execute() data = json.loads(response.text) if not response.ok: err_msg = data.get('error') raise Exception(err_msg) return munchify(data) + def _list_managedservices(self): + instances = ManagedService.list_instances() + return munchify(instances) + @classmethod def _maybe_guid(cls, kind: str, name_or_guid: str) -> typing.Union[str, None]: if re.fullmatch(cls.KIND_REGEX[kind], name_or_guid): diff --git a/riocli/apply/util.py b/riocli/apply/util.py index 702379e2..488073e1 100644 --- a/riocli/apply/util.py +++ b/riocli/apply/util.py @@ -12,27 +12,29 @@ # See the License for the specific language governing permissions and # limitations under the License. -import os import glob +import os + def parse_varidac_pathargs(pathItem): glob_files = [] abs_path = os.path.abspath(pathItem) - #make it absolte - # does the path exist. + # make it absolte + # does the path exist. # is it a dir? scan recursively # not a dir but has special charaters in it? [*?^!] # assume its a valid glob, use it to glob recursively # if all else fails - # consider it a file path directly. + # consider it a file path directly. 
if os.path.exists(abs_path): if os.path.isdir(abs_path): - #TODO: Should we keep this recursive? + # TODO: Should we keep this recursive? glob_files = glob.glob(abs_path + "/**/*", recursive=True) else: glob_files = glob.glob(abs_path, recursive=True) return glob_files + def process_files_values_secrets(files, values, secrets): glob_files = [] @@ -52,5 +54,5 @@ def process_files_values_secrets(files, values, secrets): if abs_secrets in glob_files: glob_files.remove(abs_secrets) - glob_files = list(set(glob_files)) + glob_files = list(set(glob_files)) return glob_files, abs_values, abs_secret diff --git a/riocli/auth/staging.py b/riocli/auth/staging.py index db7e38ad..1d5c4fad 100644 --- a/riocli/auth/staging.py +++ b/riocli/auth/staging.py @@ -37,6 +37,7 @@ def environment(ctx: click.Context, name: str): ctx.obj.data.pop('catalog_host', None) ctx.obj.data.pop('core_api_host', None) ctx.obj.data.pop('rip_host', None) + ctx.obj.data.pop('v2api_host', None) else: _configure_environment(ctx.obj, name) @@ -62,13 +63,15 @@ def _configure_environment(config: Configuration, name: str) -> None: _validate_environment(name) # Named Staging environments don't have hyphen in the name. Ephemeral environments do. 
- name = name+'-' if name.startswith('pr') else name + # name = name+'-' if name.startswith('pr') else name catalog = 'https://{}catalog.{}'.format(name, _STAGING_ENVIRONMENT_SUBDOMAIN) core = 'https://{}apiserver.{}'.format(name, _STAGING_ENVIRONMENT_SUBDOMAIN) rip = 'https://{}rip.{}'.format(name, _STAGING_ENVIRONMENT_SUBDOMAIN) + v2api = 'https://{}api.{}'.format(name, _STAGING_ENVIRONMENT_SUBDOMAIN) config.data['environment'] = name config.data['catalog_host'] = catalog config.data['core_api_host'] = core config.data['rip_host'] = rip + config.data['v2api_host'] = v2api diff --git a/riocli/bootstrap.py b/riocli/bootstrap.py index ec042337..1b533114 100644 --- a/riocli/bootstrap.py +++ b/riocli/bootstrap.py @@ -40,6 +40,7 @@ from riocli.secret import secret from riocli.shell import shell, deprecated_repl from riocli.static_route import static_route +from riocli.managedservice import managedservice @with_plugins(iter_entry_points('riocli.plugins')) @@ -92,3 +93,4 @@ def version(): cli.add_command(disk) cli.add_command(shell) cli.add_command(deprecated_repl) +cli.add_command(managedservice) diff --git a/riocli/deployment/model.py b/riocli/deployment/model.py index af86fd17..e5a14ac1 100644 --- a/riocli/deployment/model.py +++ b/riocli/deployment/model.py @@ -107,7 +107,7 @@ def create_object(self, client: Client) -> typing.Any: disk_mounts = {} for vol in self.spec.volumes: disk_guid, disk = self.rc.find_depends(vol.depends) - if not disk_guid in disk_mounts: + if disk_guid not in disk_mounts: disk_mounts[disk_guid] = [] disk_mounts[disk_guid].append(ExecutableMount(vol.execName, vol.mountPath, vol.subPath)) @@ -116,7 +116,17 @@ def create_object(self, client: Client) -> typing.Any: disk = client.get_volume_instance(disk_guid) provision_config.mount_volume(__componentName, volume=disk, executable_mounts=disk_mounts[disk_guid]) - + + # TODO: Managed Services is currently limited to `cloud` deployments + # since we don't expose `elasticsearch` outside Openshift. 
This may + # change in the future. + if 'managedServices' in self.spec: + managed_services = [] + for managed_service in self.spec.managedServices: + managed_services.append({ + "instance": managed_service.depends.nameOrGUID, + }) + provision_config.context["managedServices"] = managed_services if self.spec.runtime == 'device': device_guid, device = self.rc.find_depends(self.spec.device.depends) diff --git a/riocli/deployment/validation.py b/riocli/deployment/validation.py index e5d75390..ea3e0d49 100644 --- a/riocli/deployment/validation.py +++ b/riocli/deployment/validation.py @@ -17,12 +17,12 @@ def validate(data, custom_formats={}, name_prefix=None): def validate___definitions_deployment(data, custom_formats={}, name_prefix=None): if not isinstance(data, (dict)): - raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'apiVersion': {'const': 'apiextensions.rapyuta.io/v1', 'default': 'apiextensions.rapyuta.io/v1'}, 'kind': {'const': 'Deployment', 'default': 'Deployment'}, 'metadata': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'depends': {'$ref': '#/definitions/packageDepends'}, 'labels': {'$ref': '#/definitions/stringMap', 'uniqueItems': True}, 'guid': {'$ref': '#/definitions/packageGUID'}, 'creator': {'$ref': '#/definitions/uuid'}, 'project': {'$ref': '#/definitions/projectGUID'}}, 'required': ['name', 'depends']}, 'spec': {'properties': {'runtime': {'type': 'string', 'enum': ['device', 'cloud'], 'default': 'cloud'}, 'depends': {'type': 'array', 'items': {'$ref': '#/definitions/deploymentDepends'}}}, 'dependencies': {'runtime': {'oneOf': [{'properties': {'runtime': {'type': 'string', 'enum': ['device']}, 'depends': {'type': 'object', '$ref': '#/definitions/deviceDepends'}, 'restart': {'type': 'string', 'enum': ['always', 'onfailure', 'never'], 'default': 'always'}, 'envArgs': {'type': 'array', 'items': {'$ref': 
'#/definitions/envArgsSpec'}}, 'volumes': {'type': 'array', 'items': {'$ref': '#/definitions/deviceVolumeAttachSpec'}}, 'rosNetworks': {'type': 'array', 'items': {'$ref': '#/definitions/deviceNetworkAttachSpec'}}}}, {'properties': {'runtime': {'type': 'string', 'enum': ['cloud']}, 'envArgs': {'type': 'array', 'items': {'$ref': '#/definitions/envArgsSpec'}}, 'volumes': {'type': 'array', 'items': {'$ref': '#/definitions/cloudVolumeAttachSpec'}}, 'staticRoutes': {'type': 'array', 'items': {'$ref': '#/definitions/endpointSpec'}}, 'rosNetworks': {'type': 'array', 'items': {'$ref': '#/definitions/cloudNetworkAttachSpec'}}}}]}}}}, 'required': ['apiVersion', 'kind', 'metadata', 'spec']}, rule='type') + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'apiVersion': {'const': 'apiextensions.rapyuta.io/v1', 'default': 'apiextensions.rapyuta.io/v1'}, 'kind': {'const': 'Deployment', 'default': 'Deployment'}, 'metadata': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'depends': {'$ref': '#/definitions/packageDepends'}, 'labels': {'$ref': '#/definitions/stringMap', 'uniqueItems': True}, 'guid': {'$ref': '#/definitions/packageGUID'}, 'creator': {'$ref': '#/definitions/uuid'}, 'project': {'$ref': '#/definitions/projectGUID'}}, 'required': ['name', 'depends']}, 'spec': {'properties': {'runtime': {'type': 'string', 'enum': ['device', 'cloud'], 'default': 'cloud'}, 'depends': {'type': 'array', 'items': {'$ref': '#/definitions/deploymentDepends'}}}, 'dependencies': {'runtime': {'oneOf': [{'properties': {'runtime': {'type': 'string', 'enum': ['device']}, 'depends': {'type': 'object', '$ref': '#/definitions/deviceDepends'}, 'restart': {'type': 'string', 'enum': ['always', 'onfailure', 'never'], 'default': 'always'}, 'envArgs': {'type': 'array', 'items': {'$ref': '#/definitions/envArgsSpec'}}, 'volumes': {'type': 'array', 'items': {'$ref': 
'#/definitions/deviceVolumeAttachSpec'}}, 'rosNetworks': {'type': 'array', 'items': {'$ref': '#/definitions/deviceNetworkAttachSpec'}}}}, {'properties': {'runtime': {'type': 'string', 'enum': ['cloud']}, 'envArgs': {'type': 'array', 'items': {'$ref': '#/definitions/envArgsSpec'}}, 'volumes': {'type': 'array', 'items': {'$ref': '#/definitions/cloudVolumeAttachSpec'}}, 'staticRoutes': {'type': 'array', 'items': {'$ref': '#/definitions/endpointSpec'}}, 'rosNetworks': {'type': 'array', 'items': {'$ref': '#/definitions/cloudNetworkAttachSpec'}}, 'managedServices': {'type': 'array', 'items': {'$ref': '#/definitions/managedServiceSpec'}}}}]}}}}, 'required': ['apiVersion', 'kind', 'metadata', 'spec']}, rule='type') data_is_dict = isinstance(data, dict) if data_is_dict: data_len = len(data) if not all(prop in data for prop in ['apiVersion', 'kind', 'metadata', 'spec']): - raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['apiVersion', 'kind', 'metadata', 'spec'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'apiVersion': {'const': 'apiextensions.rapyuta.io/v1', 'default': 'apiextensions.rapyuta.io/v1'}, 'kind': {'const': 'Deployment', 'default': 'Deployment'}, 'metadata': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'depends': {'$ref': '#/definitions/packageDepends'}, 'labels': {'$ref': '#/definitions/stringMap', 'uniqueItems': True}, 'guid': {'$ref': '#/definitions/packageGUID'}, 'creator': {'$ref': '#/definitions/uuid'}, 'project': {'$ref': '#/definitions/projectGUID'}}, 'required': ['name', 'depends']}, 'spec': {'properties': {'runtime': {'type': 'string', 'enum': ['device', 'cloud'], 'default': 'cloud'}, 'depends': {'type': 'array', 'items': {'$ref': '#/definitions/deploymentDepends'}}}, 'dependencies': {'runtime': {'oneOf': [{'properties': {'runtime': {'type': 'string', 'enum': ['device']}, 'depends': {'type': 'object', '$ref': '#/definitions/deviceDepends'}, 
'restart': {'type': 'string', 'enum': ['always', 'onfailure', 'never'], 'default': 'always'}, 'envArgs': {'type': 'array', 'items': {'$ref': '#/definitions/envArgsSpec'}}, 'volumes': {'type': 'array', 'items': {'$ref': '#/definitions/deviceVolumeAttachSpec'}}, 'rosNetworks': {'type': 'array', 'items': {'$ref': '#/definitions/deviceNetworkAttachSpec'}}}}, {'properties': {'runtime': {'type': 'string', 'enum': ['cloud']}, 'envArgs': {'type': 'array', 'items': {'$ref': '#/definitions/envArgsSpec'}}, 'volumes': {'type': 'array', 'items': {'$ref': '#/definitions/cloudVolumeAttachSpec'}}, 'staticRoutes': {'type': 'array', 'items': {'$ref': '#/definitions/endpointSpec'}}, 'rosNetworks': {'type': 'array', 'items': {'$ref': '#/definitions/cloudNetworkAttachSpec'}}}}]}}}}, 'required': ['apiVersion', 'kind', 'metadata', 'spec']}, rule='required') + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['apiVersion', 'kind', 'metadata', 'spec'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'apiVersion': {'const': 'apiextensions.rapyuta.io/v1', 'default': 'apiextensions.rapyuta.io/v1'}, 'kind': {'const': 'Deployment', 'default': 'Deployment'}, 'metadata': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'depends': {'$ref': '#/definitions/packageDepends'}, 'labels': {'$ref': '#/definitions/stringMap', 'uniqueItems': True}, 'guid': {'$ref': '#/definitions/packageGUID'}, 'creator': {'$ref': '#/definitions/uuid'}, 'project': {'$ref': '#/definitions/projectGUID'}}, 'required': ['name', 'depends']}, 'spec': {'properties': {'runtime': {'type': 'string', 'enum': ['device', 'cloud'], 'default': 'cloud'}, 'depends': {'type': 'array', 'items': {'$ref': '#/definitions/deploymentDepends'}}}, 'dependencies': {'runtime': {'oneOf': [{'properties': {'runtime': {'type': 'string', 'enum': ['device']}, 'depends': {'type': 'object', '$ref': '#/definitions/deviceDepends'}, 'restart': {'type': 'string', 
'enum': ['always', 'onfailure', 'never'], 'default': 'always'}, 'envArgs': {'type': 'array', 'items': {'$ref': '#/definitions/envArgsSpec'}}, 'volumes': {'type': 'array', 'items': {'$ref': '#/definitions/deviceVolumeAttachSpec'}}, 'rosNetworks': {'type': 'array', 'items': {'$ref': '#/definitions/deviceNetworkAttachSpec'}}}}, {'properties': {'runtime': {'type': 'string', 'enum': ['cloud']}, 'envArgs': {'type': 'array', 'items': {'$ref': '#/definitions/envArgsSpec'}}, 'volumes': {'type': 'array', 'items': {'$ref': '#/definitions/cloudVolumeAttachSpec'}}, 'staticRoutes': {'type': 'array', 'items': {'$ref': '#/definitions/endpointSpec'}}, 'rosNetworks': {'type': 'array', 'items': {'$ref': '#/definitions/cloudNetworkAttachSpec'}}, 'managedServices': {'type': 'array', 'items': {'$ref': '#/definitions/managedServiceSpec'}}}}]}}}}, 'required': ['apiVersion', 'kind', 'metadata', 'spec']}, rule='required') data_keys = set(data.keys()) if "apiVersion" in data_keys: data_keys.remove("apiVersion") @@ -159,10 +159,20 @@ def validate___definitions_componentspec(data, custom_formats={}, name_prefix=No data__rosNetworks_len = len(data__rosNetworks) for data__rosNetworks_x, data__rosNetworks_item in enumerate(data__rosNetworks): validate___definitions_cloudnetworkattachspec(data__rosNetworks_item, custom_formats, (name_prefix or "data") + ".rosNetworks[{data__rosNetworks_x}]") + if "managedServices" in data_keys: + data_keys.remove("managedServices") + data__managedServices = data["managedServices"] + if not isinstance(data__managedServices, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".managedServices must be array", value=data__managedServices, name="" + (name_prefix or "data") + ".managedServices", definition={'type': 'array', 'items': {'type': 'object', 'properties': {'depends': {'$ref': '#/definitions/managedServiceDepends'}}}}, rule='type') + data__managedServices_is_list = isinstance(data__managedServices, (list, tuple)) + if 
data__managedServices_is_list: + data__managedServices_len = len(data__managedServices) + for data__managedServices_x, data__managedServices_item in enumerate(data__managedServices): + validate___definitions_managedservicespec(data__managedServices_item, custom_formats, (name_prefix or "data") + ".managedServices[{data__managedServices_x}]") data_one_of_count1 += 1 except JsonSchemaValueException: pass if data_one_of_count1 != 1: - raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + str(data_one_of_count1) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'oneOf': [{'properties': {'runtime': {'type': 'string', 'enum': ['device']}, 'depends': {'properties': {'kind': {'const': 'device', 'default': 'device'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}, 'restart': {'type': 'string', 'enum': ['always', 'onfailure', 'never'], 'default': 'always'}, 'envArgs': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'value': {'type': 'string'}}}}, 'volumes': {'type': 'array', 'items': {'type': 'object', 'properties': {'execName': {'type': 'string'}, 'mountPath': {'type': 'string'}, 'subPath': {'type': 'string'}}}}, 'rosNetworks': {'type': 'array', 'items': {'properties': {'depends': {'$ref': '#/definitions/networkDepends'}, 'interface': {'type': 'string'}, 'topics': {'type': 'array', 'items': {'type': 'string'}}}}}}}, {'properties': {'runtime': {'type': 'string', 'enum': ['cloud']}, 'envArgs': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'value': {'type': 'string'}}}}, 'volumes': {'type': 'array', 'items': {'type': 'object', 'properties': {'execName': {'type': 'string'}, 'mountPath': {'type': 'string'}, 'subPath': {'type': 'string'}, 'depends': {'$ref': '#/definitions/diskDepends'}}}}, 'staticRoutes': {'type': 'array', 'items': {'properties': {'name': {'type': 'string'}, 'depends': 
{'properties': {'kind': {'const': 'staticroute', 'default': 'staticroute'}, 'nameOrGUID': {'type': 'string'}}}}}}, 'rosNetworks': {'type': 'array', 'items': {'properties': {'depends': {'$ref': '#/definitions/networkDepends'}, 'topics': {'type': 'array', 'items': {'type': 'string'}}}}}}}]}, rule='oneOf') + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + str(data_one_of_count1) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'oneOf': [{'properties': {'runtime': {'type': 'string', 'enum': ['device']}, 'depends': {'properties': {'kind': {'const': 'device', 'default': 'device'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}, 'restart': {'type': 'string', 'enum': ['always', 'onfailure', 'never'], 'default': 'always'}, 'envArgs': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'value': {'type': 'string'}}}}, 'volumes': {'type': 'array', 'items': {'type': 'object', 'properties': {'execName': {'type': 'string'}, 'mountPath': {'type': 'string'}, 'subPath': {'type': 'string'}}}}, 'rosNetworks': {'type': 'array', 'items': {'properties': {'depends': {'$ref': '#/definitions/networkDepends'}, 'interface': {'type': 'string'}, 'topics': {'type': 'array', 'items': {'type': 'string'}}}}}}}, {'properties': {'runtime': {'type': 'string', 'enum': ['cloud']}, 'envArgs': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'value': {'type': 'string'}}}}, 'volumes': {'type': 'array', 'items': {'type': 'object', 'properties': {'execName': {'type': 'string'}, 'mountPath': {'type': 'string'}, 'subPath': {'type': 'string'}, 'depends': {'$ref': '#/definitions/diskDepends'}}}}, 'staticRoutes': {'type': 'array', 'items': {'properties': {'name': {'type': 'string'}, 'depends': {'properties': {'kind': {'const': 'staticroute', 'default': 'staticroute'}, 'nameOrGUID': {'type': 'string'}}}}}}, 
'rosNetworks': {'type': 'array', 'items': {'properties': {'depends': {'$ref': '#/definitions/networkDepends'}, 'topics': {'type': 'array', 'items': {'type': 'string'}}}}}, 'managedServices': {'type': 'array', 'items': {'type': 'object', 'properties': {'depends': {'$ref': '#/definitions/managedServiceDepends'}}}}}}]}, rule='oneOf') data_keys = set(data.keys()) if "runtime" in data_keys: data_keys.remove("runtime") @@ -206,6 +216,40 @@ def validate___definitions_deploymentdepends(data, custom_formats={}, name_prefi raise JsonSchemaValueException("" + (name_prefix or "data") + ".guid must be string", value=data__guid, name="" + (name_prefix or "data") + ".guid", definition={'type': 'string'}, rule='type') return data +def validate___definitions_managedservicespec(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'managedservice', 'default': 'managedservice'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "depends" in data_keys: + data_keys.remove("depends") + data__depends = data["depends"] + validate___definitions_managedservicedepends(data__depends, custom_formats, (name_prefix or "data") + ".depends") + return data + +def validate___definitions_managedservicedepends(data, custom_formats={}, name_prefix=None): + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "kind" in data_keys: + data_keys.remove("kind") + data__kind = data["kind"] + if data__kind != "managedservice": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".kind must be same as const definition: managedservice", value=data__kind, name="" + (name_prefix or "data") + 
".kind", definition={'const': 'managedservice', 'default': 'managedservice'}, rule='const') + else: data["kind"] = 'managedservice' + if "nameOrGUID" in data_keys: + data_keys.remove("nameOrGUID") + data__nameOrGUID = data["nameOrGUID"] + if not isinstance(data__nameOrGUID, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".nameOrGUID must be string", value=data__nameOrGUID, name="" + (name_prefix or "data") + ".nameOrGUID", definition={'type': 'string'}, rule='type') + if "guid" in data_keys: + data_keys.remove("guid") + data__guid = data["guid"] + if not isinstance(data__guid, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".guid must be string", value=data__guid, name="" + (name_prefix or "data") + ".guid", definition={'type': 'string'}, rule='type') + return data + def validate___definitions_cloudnetworkattachspec(data, custom_formats={}, name_prefix=None): data_is_dict = isinstance(data, dict) if data_is_dict: diff --git a/riocli/disk/util.py b/riocli/disk/util.py index 524a70b2..a2c71b06 100644 --- a/riocli/disk/util.py +++ b/riocli/disk/util.py @@ -56,7 +56,7 @@ def get_disk_name(client: Client, guid: str) -> str: def find_disk_guid(client: Client, name: str) -> str: - try: + try: disks = _api_call(HttpMethod.GET) for disk in disks: if disk['name'] == name: @@ -68,14 +68,16 @@ def find_disk_guid(client: Client, name: str) -> str: def _api_call(method: str, guid: typing.Union[str, None] = None, payload: typing.Union[typing.Dict, None] = None, load_response: bool = True, -) -> typing.Any: + ) -> typing.Any: config = Configuration() - catalog_host = config.data.get('catalog_host', 'https://gacatalog.apps.rapyuta.io') + catalog_host = config.data.get( + 'catalog_host', 'https://gacatalog.apps.rapyuta.io') url = '{}/disk'.format(catalog_host) if guid: url = '{}/{}'.format(url, guid) headers = config.get_auth_header() - response = RestClient(url).method(method).headers(headers).execute(payload=payload) + response = 
RestClient(url).method(method).headers( + headers).execute(payload=payload) data = None err_msg = 'error in the api call' if load_response: diff --git a/riocli/managedservice/__init__.py b/riocli/managedservice/__init__.py new file mode 100644 index 00000000..c3465e81 --- /dev/null +++ b/riocli/managedservice/__init__.py @@ -0,0 +1,40 @@ +# Copyright 2022 Rapyuta Robotics +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import click +from click_help_colors import HelpColorsGroup + +from riocli.managedservice.inspect import inspect_instance +from riocli.managedservice.list import list_instances +from riocli.managedservice.list_providers import list_providers + + +@click.group( + invoke_without_command=False, + cls=HelpColorsGroup, + help_headers_color='yellow', + help_options_color='green', +) +def managedservice() -> None: + """ + Managed Services on rapyuta.io + + With managed services on rapyuta.io, you can provision services + like elasticsearch, etc. on-demand and use them with your deployments. 
+ """ + pass + + +managedservice.add_command(list_providers) +managedservice.add_command(list_instances) +managedservice.add_command(inspect_instance) diff --git a/riocli/managedservice/inspect.py b/riocli/managedservice/inspect.py new file mode 100644 index 00000000..9a088b8b --- /dev/null +++ b/riocli/managedservice/inspect.py @@ -0,0 +1,35 @@ +# Copyright 2022 Rapyuta Robotics +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import click + +from riocli.managedservice.util import ManagedServicesClient +from riocli.utils import inspect_with_format + + +@click.command('inspect') +@click.option('--format', '-f', 'format_type', default='yaml', + type=click.Choice(['json', 'yaml'], case_sensitive=False)) +@click.argument('instance-name', required=True) +def inspect_instance(format_type: str, instance_name: str) -> None: + """ + Inspect a managedservice instance + """ + try: + client = ManagedServicesClient() + instance = client.get_instance(instance_name) + inspect_with_format(instance, format_type) + except Exception as e: + click.secho(str(e), fg='red') + raise SystemExit(1) diff --git a/riocli/managedservice/list.py b/riocli/managedservice/list.py new file mode 100644 index 00000000..4dae95b0 --- /dev/null +++ b/riocli/managedservice/list.py @@ -0,0 +1,50 @@ +# Copyright 2022 Rapyuta Robotics +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import typing + +import click +from tabulate import tabulate + +from riocli.managedservice.util import ManagedServicesClient + + +@click.command('list') +def list_instances() -> None: + """ + List all the managedservice instances + """ + try: + client = ManagedServicesClient() + instances = client.list_instances() + _display_instances(instances) + except Exception as e: + click.secho(str(e), fg='red') + raise SystemExit(1) + + +def _display_instances(instances: typing.Any): + headers = [click.style(h, fg='yellow') + for h in ("Provider", "Name", "Created At", "Labels")] + + table = [] + for i in instances: + m = i['metadata'] + table.append([ + i['spec']['provider'], + m['name'], + m['created_at'], + m['labels'] + ]) + + click.echo(tabulate(table, headers=headers, tablefmt='simple')) diff --git a/riocli/managedservice/list_providers.py b/riocli/managedservice/list_providers.py new file mode 100644 index 00000000..197185c9 --- /dev/null +++ b/riocli/managedservice/list_providers.py @@ -0,0 +1,45 @@ +# Copyright 2022 Rapyuta Robotics +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +import typing + +import click +from tabulate import tabulate + +from riocli.managedservice.util import ManagedServicesClient + + +@click.command('providers') +def list_providers() -> None: + """ + List available managedservice providers + """ + try: + client = ManagedServicesClient() + providers = client.list_providers() + _display_providers(providers) + except Exception as e: + click.secho(str(e), fg='red') + raise SystemExit(1) + + +def _display_providers(providers: typing.Any): + headers = [click.style(h, fg='yellow') for h in ('Provider Name',)] + + table = [] + for provider in providers: + if provider['name'] == 'dummy': + continue + table.append([provider['name']]) + + click.echo(tabulate(table, headers=headers, tablefmt='simple')) diff --git a/riocli/managedservice/model.py b/riocli/managedservice/model.py new file mode 100644 index 00000000..fe79bb3a --- /dev/null +++ b/riocli/managedservice/model.py @@ -0,0 +1,58 @@ +# Copyright 2022 Rapyuta Robotics +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import typing + +from munch import munchify +from rapyuta_io import Client + +from riocli.managedservice.util import ManagedServicesClient +from riocli.managedservice.validation import validate +from riocli.model import Model + + +class ManagedService(Model): + def find_object(self, client: Client) -> typing.Any: + name = self.metadata.name + client = ManagedServicesClient() + + try: + instance = client.get_instance(name) + return munchify(instance) + except Exception: + return False + + def create_object(self, client: Client) -> typing.Any: + client = ManagedServicesClient() + result = client.create_instance(self) + return munchify(result) + + def update_object(self, client: Client, obj: typing.Any) -> typing.Any: + pass + + def delete_object(self, client: Client, obj: typing.Any) -> typing.Any: + client = ManagedServicesClient() + client.delete_instance(obj.metadata.name) + + @staticmethod + def list_instances(): + client = ManagedServicesClient() + return client.list_instances() + + @classmethod + def pre_process(cls, client: Client, d: typing.Dict) -> None: + pass + + @staticmethod + def validate(d): + validate(d) diff --git a/riocli/managedservice/util.py b/riocli/managedservice/util.py new file mode 100644 index 00000000..bac6840e --- /dev/null +++ b/riocli/managedservice/util.py @@ -0,0 +1,107 @@ +# Copyright 2022 Rapyuta Robotics +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import json +import typing + +from rapyuta_io.utils.rest_client import HttpMethod, RestClient + +from riocli.config import Configuration + + +class ManagedServicesClient(object): + PROD_V2API_URL = "https://api.rapyuta.io" + + def __init__(self): + super().__init__() + config = Configuration() + self.host = config.data.get('v2api_host', self.PROD_V2API_URL) + self.base_url = "{}/v2/managedservices".format(self.host) + + def list_providers(self): + url = "{}/providers/".format(self.base_url) + headers = Configuration().get_auth_header() + response = RestClient(url).method( + HttpMethod.GET).headers(headers).execute() + data = json.loads(response.text) + if not response.ok: + err_msg = data.get('error') + raise Exception("managedservice: {}".format(err_msg)) + return data.get('items', []) + + def list_instances(self): + url = "{}/".format(self.base_url) + headers = Configuration().get_auth_header() + offset = 0 + result = [] + while True: + response = RestClient(url).method(HttpMethod.GET).query_param({ + "continue": offset, + }).headers(headers).execute() + data = json.loads(response.text) + if not response.ok: + err_msg = data.get('error') + raise Exception("managedservice: {}".format(err_msg)) + instances = data.get('items', []) + if not instances: + break + offset = data['metadata']['continue'] + result.extend(instances) + + return sorted(result, key=lambda x: x['metadata']['name']) + + def get_instance(self, instance_name: str) -> typing.Dict: + url = "{}/{}/".format(self.base_url, instance_name) + headers = Configuration().get_auth_header() + response = RestClient(url).method( + HttpMethod.GET).headers(headers).execute() + data = json.loads(response.text) + if not response.ok: + err_msg = data.get('error') + raise Exception("managedservice: {}".format(err_msg)) + return data + + def create_instance(self, instance: typing.Any) -> typing.Dict: + url = "{}/".format(self.base_url) + headers = Configuration().get_auth_header() + + payload = { + "metadata": { + 
"name": instance.metadata.name, + "labels": instance.metadata.get("labels", None), + }, + "spec": { + "provider": instance.spec.provider, + "config": instance.spec.get("config", None) + } + } + + response = RestClient(url).method(HttpMethod.POST).headers( + headers).execute(payload=payload) + data = json.loads(response.text) + if not response.ok: + err_msg = data.get('error') + raise Exception("managedservice: {}".format(err_msg)) + return data + + def delete_instance(self, instance_name): + url = "{}/{}/".format(self.base_url, instance_name) + headers = Configuration().get_auth_header() + response = RestClient(url).method( + HttpMethod.DELETE).headers(headers).execute() + data = json.loads(response.text) + if not response.ok: + err_msg = data.get('error') + raise Exception("managedservice: {}".format(err_msg)) + return data diff --git a/riocli/managedservice/validation.py b/riocli/managedservice/validation.py new file mode 100644 index 00000000..76129621 --- /dev/null +++ b/riocli/managedservice/validation.py @@ -0,0 +1,130 @@ +VERSION = "2.16.2" +import re +from fastjsonschema import JsonSchemaValueException + + +REGEX_PATTERNS = { + '^project-[a-z]{24}$': re.compile('^project-[a-z]{24}\\Z'), + '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$': re.compile('^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}\\Z') +} + +NoneType = type(None) + +def validate(data, custom_formats={}, name_prefix=None): + validate___definitions_managedservice(data, custom_formats, (name_prefix or "data") + "") + return data + +def validate___definitions_managedservice(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'apiVersion': {'const': 'apiextensions.rapyuta.io/v1', 'default': 'apiextensions.rapyuta.io/v1'}, 'kind': 
{'const': 'ManagedService', 'default': 'ManagedService'}, 'metadata': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'guid': {'$ref': '#/definitions/uuid'}, 'creator': {'$ref': '#/definitions/uuid'}, 'project': {'$ref': '#/definitions/projectGUID'}, 'labels': {'$ref': '#/definitions/stringMap'}}, 'required': ['name']}, 'spec': {'type': 'object', 'properties': {'provider': {'type': 'string', 'enum': ['elasticsearch']}, 'config': {'type': 'object'}}, 'required': ['provider', 'config']}}, 'required': ['apiVersion', 'kind', 'metadata', 'spec']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['apiVersion', 'kind', 'metadata', 'spec']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['apiVersion', 'kind', 'metadata', 'spec'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'apiVersion': {'const': 'apiextensions.rapyuta.io/v1', 'default': 'apiextensions.rapyuta.io/v1'}, 'kind': {'const': 'ManagedService', 'default': 'ManagedService'}, 'metadata': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'guid': {'$ref': '#/definitions/uuid'}, 'creator': {'$ref': '#/definitions/uuid'}, 'project': {'$ref': '#/definitions/projectGUID'}, 'labels': {'$ref': '#/definitions/stringMap'}}, 'required': ['name']}, 'spec': {'type': 'object', 'properties': {'provider': {'type': 'string', 'enum': ['elasticsearch']}, 'config': {'type': 'object'}}, 'required': ['provider', 'config']}}, 'required': ['apiVersion', 'kind', 'metadata', 'spec']}, rule='required') + data_keys = set(data.keys()) + if "apiVersion" in data_keys: + data_keys.remove("apiVersion") + data__apiVersion = data["apiVersion"] + if data__apiVersion != "apiextensions.rapyuta.io/v1": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".apiVersion must be same as const definition: apiextensions.rapyuta.io/v1", 
value=data__apiVersion, name="" + (name_prefix or "data") + ".apiVersion", definition={'const': 'apiextensions.rapyuta.io/v1', 'default': 'apiextensions.rapyuta.io/v1'}, rule='const') + else: data["apiVersion"] = 'apiextensions.rapyuta.io/v1' + if "kind" in data_keys: + data_keys.remove("kind") + data__kind = data["kind"] + if data__kind != "ManagedService": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".kind must be same as const definition: ManagedService", value=data__kind, name="" + (name_prefix or "data") + ".kind", definition={'const': 'ManagedService', 'default': 'ManagedService'}, rule='const') + else: data["kind"] = 'ManagedService' + if "metadata" in data_keys: + data_keys.remove("metadata") + data__metadata = data["metadata"] + validate___definitions_metadata(data__metadata, custom_formats, (name_prefix or "data") + ".metadata") + if "spec" in data_keys: + data_keys.remove("spec") + data__spec = data["spec"] + validate___definitions_managedservicespec(data__spec, custom_formats, (name_prefix or "data") + ".spec") + return data + +def validate___definitions_managedservicespec(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'provider': {'type': 'string', 'enum': ['elasticsearch']}, 'config': {'type': 'object'}}, 'required': ['provider', 'config']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['provider', 'config']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['provider', 'config'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'provider': {'type': 'string', 'enum': ['elasticsearch']}, 'config': {'type': 'object'}}, 'required': ['provider', 
'config']}, rule='required') + data_keys = set(data.keys()) + if "provider" in data_keys: + data_keys.remove("provider") + data__provider = data["provider"] + if not isinstance(data__provider, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".provider must be string", value=data__provider, name="" + (name_prefix or "data") + ".provider", definition={'type': 'string', 'enum': ['elasticsearch']}, rule='type') + if data__provider not in ['elasticsearch']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".provider must be one of ['elasticsearch']", value=data__provider, name="" + (name_prefix or "data") + ".provider", definition={'type': 'string', 'enum': ['elasticsearch']}, rule='enum') + if "config" in data_keys: + data_keys.remove("config") + data__config = data["config"] + if not isinstance(data__config, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".config must be object", value=data__config, name="" + (name_prefix or "data") + ".config", definition={'type': 'object'}, rule='type') + return data + +def validate___definitions_metadata(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'guid': {'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, 'creator': {'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, 'project': {'type': 'string', 'pattern': '^project-[a-z]{24}$'}, 'labels': {'type': 'object', 'additionalProperties': {'type': 'string'}}}, 'required': ['name']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['name']): + raise JsonSchemaValueException("" + 
(name_prefix or "data") + " must contain ['name'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'guid': {'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, 'creator': {'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, 'project': {'type': 'string', 'pattern': '^project-[a-z]{24}$'}, 'labels': {'type': 'object', 'additionalProperties': {'type': 'string'}}}, 'required': ['name']}, rule='required') + data_keys = set(data.keys()) + if "name" in data_keys: + data_keys.remove("name") + data__name = data["name"] + if not isinstance(data__name, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string'}, rule='type') + if "guid" in data_keys: + data_keys.remove("guid") + data__guid = data["guid"] + validate___definitions_uuid(data__guid, custom_formats, (name_prefix or "data") + ".guid") + if "creator" in data_keys: + data_keys.remove("creator") + data__creator = data["creator"] + validate___definitions_uuid(data__creator, custom_formats, (name_prefix or "data") + ".creator") + if "project" in data_keys: + data_keys.remove("project") + data__project = data["project"] + validate___definitions_projectguid(data__project, custom_formats, (name_prefix or "data") + ".project") + if "labels" in data_keys: + data_keys.remove("labels") + data__labels = data["labels"] + validate___definitions_stringmap(data__labels, custom_formats, (name_prefix or "data") + ".labels") + return data + +def validate___definitions_stringmap(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", 
definition={'type': 'object', 'additionalProperties': {'type': 'string'}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + for data_key in data_keys: + if data_key not in []: + data_value = data.get(data_key) + if not isinstance(data_value, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".{data_key}".format(**locals()) + " must be string", value=data_value, name="" + (name_prefix or "data") + ".{data_key}".format(**locals()) + "", definition={'type': 'string'}, rule='type') + return data + +def validate___definitions_projectguid(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^project-[a-z]{24}$'}, rule='type') + if isinstance(data, str): + if not REGEX_PATTERNS['^project-[a-z]{24}$'].search(data): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must match pattern ^project-[a-z]{24}$", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^project-[a-z]{24}$'}, rule='pattern') + return data + +def validate___definitions_uuid(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, rule='type') + if isinstance(data, str): + if not REGEX_PATTERNS['^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'].search(data): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must match pattern ^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$", value=data, name="" + (name_prefix or "data") + "", 
definition={'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, rule='pattern') + return data \ No newline at end of file diff --git a/riocli/model/base.py b/riocli/model/base.py index 3df86a92..844bd1e4 100644 --- a/riocli/model/base.py +++ b/riocli/model/base.py @@ -23,40 +23,47 @@ from riocli.project.util import find_project_guid -prompt = ">> {}{}{} [{}]" #>> msg spacer rigth_msg time +prompt = ">> {}{}{} [{}]" # >> msg spacer rigth_msg time DELETE_POLICY_LABEL = 'rapyuta.io/deletionPolicy' + def message_with_prompt(msg, right_msg="", fg='white', with_time=True): columns, _ = get_terminal_size() time = datetime.now().isoformat('T') spacer = ' '*(int(columns) - len(msg + right_msg + time) - 12) msg = prompt.format(msg, spacer, right_msg, time) click.secho(msg, fg=fg) - + class Model(ABC, Munch): - + def apply(self, client: Client, *args, **kwargs) -> typing.Any: try: self._set_project_in_client(client) obj = self.find_object(client) dryrun = kwargs.get("dryrun", False) if not obj: - message_with_prompt("⌛ Create {}:{}".format(self.kind.lower(), self.metadata.name), fg='yellow') + message_with_prompt("⌛ Create {}:{}".format( + self.kind.lower(), self.metadata.name), fg='yellow') if not dryrun: result = self.create_object(client) - message_with_prompt("✅ Created {}:{}".format(self.kind.lower(), self.metadata.name), fg='green') + message_with_prompt("✅ Created {}:{}".format( + self.kind.lower(), self.metadata.name), fg='green') return result else: - message_with_prompt('🔎 {}:{} exists. will be updated'.format(self.kind.lower(), self.metadata.name)) - message_with_prompt("⌛ Update {}:{}".format(self.kind.lower(), self.metadata.name), fg='yellow') + message_with_prompt('🔎 {}:{} exists. 
will be updated'.format( + self.kind.lower(), self.metadata.name)) + message_with_prompt("⌛ Update {}:{}".format( + self.kind.lower(), self.metadata.name), fg='yellow') if not dryrun: result = self.update_object(client, obj) - message_with_prompt("✅ Updated {}:{}".format(self.kind.lower(), self.metadata.name), fg='green') + message_with_prompt("✅ Updated {}:{}".format( + self.kind.lower(), self.metadata.name), fg='green') return result except Exception as e: - message_with_prompt("‼ ERR {}:{}. {} ‼".format(self.kind.lower(), self.metadata.name, str(e)), fg="red") + message_with_prompt("‼ ERR {}:{}. {} ‼".format( + self.kind.lower(), self.metadata.name, str(e)), fg="red") raise e def delete(self, client: Client, obj: typing.Any, *args, **kwargs): @@ -66,24 +73,27 @@ def delete(self, client: Client, obj: typing.Any, *args, **kwargs): dryrun = kwargs.get("dryrun", False) if not obj: - message_with_prompt('⁉ {}:{} does not exist'.format(self.kind.lower(), self.metadata.name)) + message_with_prompt('⁉ {}:{} does not exist'.format( + self.kind.lower(), self.metadata.name)) return else: - message_with_prompt("⌛ Delete {}:{}".format(self.kind.lower(), self.metadata.name), fg='yellow') + message_with_prompt("⌛ Delete {}:{}".format( + self.kind.lower(), self.metadata.name), fg='yellow') if not dryrun: labels = self.metadata.get('labels', {}) if DELETE_POLICY_LABEL in labels and \ - labels.get(DELETE_POLICY_LABEL) and \ + labels.get(DELETE_POLICY_LABEL) and \ labels.get(DELETE_POLICY_LABEL).lower() == "retain": - click.secho(">> Warning: delete protection enabled on {}:{}. Resource will be retained ".format(self.kind.lower(), self.metadata.name), fg="yellow") - return + click.secho(">> Warning: delete protection enabled on {}:{}. 
Resource will be retained ".format( + self.kind.lower(), self.metadata.name), fg="yellow") + return - self.delete_object(client, obj) - message_with_prompt("❌ Deleted {}:{}".format(self.kind.lower(), self.metadata.name), fg='red') - + message_with_prompt("❌ Deleted {}:{}".format( + self.kind.lower(), self.metadata.name), fg='red') except Exception as e: - message_with_prompt("‼ ERR {}:{}. {} ‼".format(self.kind.lower(), self.metadata.name, str(e)), fg="red") + message_with_prompt("‼ ERR {}:{}. {} ‼".format( + self.kind.lower(), self.metadata.name, str(e)), fg="red") raise e @abstractmethod