From ae470ec334c8c06540a4c973890bf556d4d46424 Mon Sep 17 00:00:00 2001
From: Abhijeet Singh Kaurav
Date: Thu, 24 Mar 2022 08:45:23 +0530
Subject: [PATCH] Perform Cache Atomic updates for entity crud operations (#223)

* Add basic configuration for atomic updates

* minor fix for subnet query

* fix atomic operations for environment

* adding atomic ops for users, groups and roles cache

* Adding changes for cache object

* Improving projects update cache

* Plugging atomic operations for users, groups, projects and env commands

* Fixing update_one helper

* Add calm update cache for individual entity

* Added cache changes in readme

* Fixing get_cache_table_types helper

* Correcting comment
---
 calm/dsl/api/resource.py       |   1 +
 calm/dsl/api/setting.py        |  11 +-
 calm/dsl/cli/cache_commands.py |  32 ++-
 calm/dsl/cli/environments.py   |  23 +-
 calm/dsl/cli/groups.py         |  16 +-
 calm/dsl/cli/projects.py       |  56 ++--
 calm/dsl/cli/users.py          |  16 +-
 calm/dsl/db/table_config.py    | 501 ++++++++++++++++++++++++++++-----
 calm/dsl/store/cache.py        |  75 +++--
 release-notes/3.4.0/README.md  |   4 +
 10 files changed, 584 insertions(+), 151 deletions(-)

diff --git a/calm/dsl/api/resource.py b/calm/dsl/api/resource.py
index 329863e09..5ebf34806 100644
--- a/calm/dsl/api/resource.py
+++ b/calm/dsl/api/resource.py
@@ -98,6 +98,7 @@ def get_uuid_name_map(self, params={}):
 
         return uuid_name_map
 
+    # TODO: Fix return type of list_all helper
     def list_all(self, api_limit=250, base_params=None, ignore_error=False):
         """returns the list of entities"""
 
diff --git a/calm/dsl/api/setting.py b/calm/dsl/api/setting.py
index 75db4f94b..e40917ba8 100644
--- a/calm/dsl/api/setting.py
+++ b/calm/dsl/api/setting.py
@@ -29,17 +29,12 @@ def vms_list(self, id, params=dict()):
     def get_uuid_type_map(self, params=dict()):
         """returns map containing {account_uuid: account_type} details"""
 
-        response, err = self.list(params)
+        res_entities, err = self.list_all(base_params=params, ignore_error=True)
         if err:
-            raise Exception("[{}] - {}".format(err["code"], err["error"]))
-
-        response = response.json()
-        total_matches = response["metadata"]["total_matches"]
-        if total_matches == 0:
-            return {}
+            raise Exception(err)
 
         uuid_type_map = {}
-        for entity in response["entities"]:
+        for entity in res_entities:
             a_uuid = entity["metadata"]["uuid"]
             a_type = entity["status"]["resources"]["type"]
             uuid_type_map[a_uuid] = a_type
 
diff --git a/calm/dsl/cli/cache_commands.py b/calm/dsl/cli/cache_commands.py
index d37db4c85..8e5531eea 100644
--- a/calm/dsl/cli/cache_commands.py
+++ b/calm/dsl/cli/cache_commands.py
@@ -1,6 +1,8 @@
 import datetime
+import click
 
 from calm.dsl.store import Cache
+from calm.dsl.constants import CACHE
 from .main import show, update, clear
 from .utils import highlight_text
@@ -9,6 +11,19 @@
 LOG = get_logging_handle(__name__)
 
 
+def get_cache_table_types():
+    """returns cache table types"""
+
+    # Note: do not use Cache.get_cache_tables().keys(),
+    # as it will break container initialization due to a cyclic dependency
+    table_types = []
+    for attr in CACHE.ENTITY.__dict__:
+        if not (attr.startswith("__")):
+            table_types.append(getattr(CACHE.ENTITY, attr))
+
+    return table_types
+
+
 @show.command("cache")
 def show_cache_command():
     """Display the cache data"""
@@ -25,9 +40,20 @@ def clear_cache():
 
 
 @update.command("cache")
-def update_cache():
+@click.option(
+    "--entity",
+    "-e",
+    default=None,
+    help="Cache entity; if not given, the whole cache will be updated",
+    type=click.Choice(get_cache_table_types()),
+)
+def update_cache(entity):
     """Update the data for dynamic entities stored 
in the cache""" - Cache.sync() - Cache.show_data() + if entity: + Cache.sync_table(entity) + Cache.show_table(entity) + else: + Cache.sync() + Cache.show_data() LOG.info(highlight_text("Cache updated at {}".format(datetime.datetime.now()))) diff --git a/calm/dsl/cli/environments.py b/calm/dsl/cli/environments.py index 1391096b6..66a7466f8 100644 --- a/calm/dsl/cli/environments.py +++ b/calm/dsl/cli/environments.py @@ -242,11 +242,12 @@ def create_environment_from_dsl_file( click.echo(json.dumps(env_std_out, indent=4, separators=(",", ": "))) if no_cache_update: - LOG.info("skipping environments and projects cache update") + LOG.info("skipping environments cache update") else: - LOG.info("Updating projects and environments cache ...") - Cache.sync_table(cache_type=CACHE.ENTITY.PROJECT) - Cache.sync_table(cache_type=CACHE.ENTITY.ENVIRONMENT) + LOG.info("Updating environments cache ...") + Cache.add_one( + entity_type=CACHE.ENTITY.ENVIRONMENT, uuid=env_std_out.get("uuid") + ) LOG.info("[Done]") @@ -314,11 +315,10 @@ def update_environment_from_dsl_file( click.echo(json.dumps(stdout_dict, indent=4, separators=(",", ": "))) if no_cache_update: - LOG.info("skipping environments and projects cache update") + LOG.info("skipping environments cache update") else: - LOG.info("Updating projects and environments cache ...") - Cache.sync_table(cache_type=CACHE.ENTITY.PROJECT) - Cache.sync_table(cache_type=CACHE.ENTITY.ENVIRONMENT) + LOG.info("Updating environments cache ...") + Cache.update_one(entity_type=CACHE.ENTITY.ENVIRONMENT, uuid=environment_id) LOG.info("[Done]") @@ -504,9 +504,8 @@ def delete_environment(environment_name, project_name, no_cache_update=False): update_project_envs(project_name, remove_env_uuids=[environment_id]) if no_cache_update: - LOG.info("skipping environments and projects cache update") + LOG.info("skipping environments cache update") else: - LOG.info("Updating environments and projects cache ...") - Cache.sync_table(cache_type=CACHE.ENTITY.PROJECT) - Cache.sync_table(cache_type=CACHE.ENTITY.ENVIRONMENT) + LOG.info("Updating environments cache ...") + Cache.delete_one(entity_type=CACHE.ENTITY.ENVIRONMENT, uuid=environment_id) LOG.info("[Done]") diff --git a/calm/dsl/cli/groups.py b/calm/dsl/cli/groups.py index aaf6490df..dea88c698 100644 --- a/calm/dsl/cli/groups.py +++ b/calm/dsl/cli/groups.py @@ -133,6 +133,7 @@ def create_group(name): } click.echo(json.dumps(stdout_dict, indent=4, separators=(",", ": "))) + user_group_uuid = res["metadata"]["uuid"] LOG.info("Polling on user-group creation task") task_state = watch_task( res["status"]["execution_context"]["task_uuid"], poll_interval=5 @@ -143,7 +144,7 @@ def create_group(name): # Update user-groups in cache LOG.info("Updating user-groups cache ...") - Cache.sync_table(cache_type=CACHE.ENTITY.USER_GROUP) + Cache.add_one(entity_type=CACHE.ENTITY.USER_GROUP, uuid=user_group_uuid) LOG.info("[Done]") @@ -152,12 +153,15 @@ def delete_group(group_names): client = get_api_client() + deleted_group_uuids = [] for name in group_names: group_ref = Ref.Group(name) res, err = client.group.delete(group_ref["uuid"]) if err: - raise Exception("[{}] - {}".format(err["code"], err["error"])) + LOG.exception("[{}] - {}".format(err["code"], err["error"])) + sys.exit(-1) + deleted_group_uuids.append(group_ref["uuid"]) LOG.info("Polling on user-group deletion task") res = res.json() task_state = watch_task( @@ -170,6 +174,8 @@ def delete_group(group_names): sys.exit(-1) # Update user-groups in cache - LOG.info("Updating user-groups cache ...") - 
Cache.sync_table(cache_type=CACHE.ENTITY.USER_GROUP) - LOG.info("[Done]") + if deleted_group_uuids: + LOG.info("Updating user-groups cache ...") + for _group_uuid in deleted_group_uuids: + Cache.delete_one(entity_type=CACHE.ENTITY.USER_GROUP, uuid=_group_uuid) + LOG.info("[Done]") diff --git a/calm/dsl/cli/projects.py b/calm/dsl/cli/projects.py index 66da71248..f9d83016c 100644 --- a/calm/dsl/cli/projects.py +++ b/calm/dsl/cli/projects.py @@ -378,11 +378,12 @@ def create_project_from_dsl( project_name = project_data["name"] project_uuid = project_data["uuid"] + # Update project in cache + LOG.info("Updating projects cache") + Cache.add_one(entity_type=CACHE.ENTITY.PROJECT, uuid=project_uuid) + LOG.info("[Done]") + if envs: - # Update project in cache - LOG.info("Updating projects cache") - Cache.sync_table("project") - LOG.info("[Done]") # As ahv helpers in environment should use account from project accounts # updating the context @@ -428,14 +429,16 @@ def create_project_from_dsl( # Reset the context changes ContextObj.reset_configuration() - if no_cache_update: - LOG.info("skipping projects and environments cache update") - else: - # Update projects in cache - LOG.info("Updating projects and environments cache ...") - Cache.sync_table(cache_type=CACHE.ENTITY.PROJECT) - Cache.sync_table(cache_type=CACHE.ENTITY.ENVIRONMENT) - LOG.info("[Done]") + if no_cache_update: + LOG.info("Skipping environments cache update") + else: + # Update environments in cache + LOG.info("Updating environments cache ...") + for _e_item in env_ref_list: + Cache.add_one( + entity_type=CACHE.ENTITY.ENVIRONMENT, uuid=_e_item["uuid"] + ) + LOG.info("[Done]") def describe_project(project_name, out): @@ -568,20 +571,21 @@ def describe_project(project_name, out): def delete_project(project_names, no_cache_update=False): client = get_api_client() - params = {"length": 1000} - project_name_uuid_map = client.project.get_name_uuid_map(params) - projects_deleted = False + project_name_uuid_map = client.project.get_name_uuid_map() + deleted_projects_uuids = [] for project_name in project_names: project_id = project_name_uuid_map.get(project_name, "") if not project_id: LOG.warning("Project {} not found.".format(project_name)) continue - projects_deleted = True LOG.info("Deleting project '{}'".format(project_name)) res, err = client.project.delete(project_id) if err: - raise Exception("[{}] - {}".format(err["code"], err["error"])) + LOG.exception("[{}] - {}".format(err["code"], err["error"])) + continue + + deleted_projects_uuids.append(project_id) LOG.info("Polling on project deletion task") res = res.json() @@ -593,13 +597,13 @@ def delete_project(project_names, no_cache_update=False): sys.exit(-1) # Update projects in cache if any project has been deleted - if projects_deleted: + if deleted_projects_uuids: if no_cache_update: - LOG.info("skipping projects and environment cache update") + LOG.info("skipping projects cache update") else: - LOG.info("Updating projects and environment cache ...") - Cache.sync_table(cache_type=CACHE.ENTITY.PROJECT) - Cache.sync_table(cache_type=CACHE.ENTITY.ENVIRONMENT) + LOG.info("Updating projects cache ...") + for _proj_id in deleted_projects_uuids: + Cache.delete_one(entity_type=CACHE.ENTITY.PROJECT, uuid=_proj_id) LOG.info("[Done]") @@ -790,10 +794,10 @@ def update_project_from_dsl(project_name, project_file, no_cache_update=False): sys.exit(-1) if no_cache_update: - LOG.info("skipping projects cache update") + LOG.info("Skipping projects cache update") else: LOG.info("Updating projects 
cache ...") - Cache.sync_table(cache_type=CACHE.ENTITY.PROJECT) + Cache.update_one(entity_type=CACHE.ENTITY.PROJECT, uuid=project_uuid) LOG.info("[Done]") @@ -992,6 +996,10 @@ def update_project_using_cli_switches( LOG.exception("Project updation task went to {} state".format(task_state)) sys.exit(-1) + LOG.info("Updating projects cache ...") + Cache.update_one(entity_type=CACHE.ENTITY.PROJECT, uuid=project_uuid) + LOG.info("[Done]") + def remove_users_from_project_acps(project_uuid, remove_user_list, remove_group_list): diff --git a/calm/dsl/cli/users.py b/calm/dsl/cli/users.py index 108ff754a..d60e813a2 100644 --- a/calm/dsl/cli/users.py +++ b/calm/dsl/cli/users.py @@ -127,6 +127,7 @@ def create_user(name, directory_service): } click.echo(json.dumps(stdout_dict, indent=4, separators=(",", ": "))) + user_uuid = res["metadata"]["uuid"] LOG.info("Polling on user creation task") task_state = watch_task( res["status"]["execution_context"]["task_uuid"], poll_interval=5 @@ -137,7 +138,7 @@ def create_user(name, directory_service): # Update users in cache LOG.info("Updating users cache ...") - Cache.sync_table(cache_type=CACHE.ENTITY.USER) + Cache.add_one(entity_type=CACHE.ENTITY.USER, uuid=user_uuid) LOG.info("[Done]") @@ -147,6 +148,7 @@ def delete_user(user_names): params = {"length": 1000} user_name_uuid_map = client.user.get_name_uuid_map(params) + deleted_user_uuids = [] for name in user_names: user_uuid = user_name_uuid_map.get(name, "") if not user_uuid: @@ -155,8 +157,10 @@ def delete_user(user_names): res, err = client.user.delete(user_uuid) if err: - raise Exception("[{}] - {}".format(err["code"], err["error"])) + LOG.exception("[{}] - {}".format(err["code"], err["error"])) + sys.exit(-1) + deleted_user_uuids.append(user_uuid) LOG.info("Polling on user deletion task") res = res.json() task_state = watch_task( @@ -167,6 +171,8 @@ def delete_user(user_names): sys.exit(-1) # Update users in cache - LOG.info("Updating users cache ...") - Cache.sync_table(cache_type=CACHE.ENTITY.USER) - LOG.info("[Done]") + if deleted_user_uuids: + LOG.info("Updating users cache ...") + for _user_uuid in deleted_user_uuids: + Cache.delete_one(entity_type=CACHE.ENTITY.USER, uuid=_user_uuid) + LOG.info("[Done]") diff --git a/calm/dsl/db/table_config.py b/calm/dsl/db/table_config.py index 8c4e322c9..1d15960c6 100644 --- a/calm/dsl/db/table_config.py +++ b/calm/dsl/db/table_config.py @@ -65,14 +65,18 @@ class CacheTableBase(BaseModel): def __init_subclass__(cls, **kwargs): super().__init_subclass__(**kwargs) - if not hasattr(cls, "__cache_type__"): + cache_type = cls.get_cache_type() + if not cache_type: raise TypeError("Base table does not have a cache type attribute") - cache_type = cls.__cache_type__ cls.tables[cache_type] = cls def get_detail_dict(self): - raise NotImplementedError("get_detail_dict helper not implemented") + raise NotImplementedError( + "get_detail_dict helper not implemented for {} table".format( + self.get_cache_type() + ) + ) @classmethod def get_provider_plugin(self, provider_type="AHV_VM"): @@ -87,30 +91,82 @@ def get_provider_plugin(self, provider_type="AHV_VM"): def get_cache_tables(cls): return cls.tables + @classmethod + def get_cache_type(cls): + """return cache type for the table""" + + return getattr(cls, "__cache_type__", None) + @classmethod def clear(cls): """removes entire data from table""" - raise NotImplementedError("clear helper not implemented") + raise NotImplementedError( + "clear helper not implemented for {} table".format(cls.get_cache_type()) + ) @classmethod def 
show_data(cls):
-        raise NotImplementedError("show_data helper not implemented")
+        raise NotImplementedError(
+            "show_data helper not implemented for {} table".format(cls.get_cache_type())
+        )
 
     @classmethod
     def sync(cls):
-        raise NotImplementedError("sync helper not implemented")
+        raise NotImplementedError(
+            "sync helper not implemented for {} table".format(cls.get_cache_type())
+        )
 
     @classmethod
     def create_entry(cls, name, uuid, **kwargs):
-        raise NotImplementedError("create_entry helper not implemented")
+        raise NotImplementedError(
+            "create_entry helper not implemented for {} table".format(
+                cls.get_cache_type()
+            )
+        )
 
     @classmethod
     def get_entity_data(cls, name, **kwargs):
-        raise NotImplementedError("get_entity_data helper not implemented")
+        raise NotImplementedError(
+            "get_entity_data helper not implemented for {} table".format(
+                cls.get_cache_type()
+            )
+        )
 
     @classmethod
     def get_entity_data_using_uuid(cls, uuid, **kwargs):
-        raise NotImplementedError("get_entity_data_using_uuid helper not implemented")
+        raise NotImplementedError(
+            "get_entity_data_using_uuid helper not implemented for {} table".format(
+                cls.get_cache_type()
+            )
+        )
+
+    @classmethod
+    def fetch_one(cls, uuid):
+        raise NotImplementedError(
+            "fetch_one helper not implemented for {} table".format(cls.get_cache_type())
+        )
+
+    @classmethod
+    def add_one(cls, uuid, **kwargs):
+        raise NotImplementedError(
+            "add_one helper not implemented for {} table".format(cls.get_cache_type())
+        )
+
+    @classmethod
+    def delete_one(cls, uuid, **kwargs):
+        raise NotImplementedError(
+            "delete_one helper not implemented for {} table".format(
+                cls.get_cache_type()
+            )
+        )
+
+    @classmethod
+    def update_one(cls, uuid, **kwargs):
+        raise NotImplementedError(
+            "update_one helper not implemented for {} table".format(
+                cls.get_cache_type()
+            )
+        )
 
 
 class AhvSubnetsCache(CacheTableBase):
@@ -638,31 +694,44 @@ def sync(cls):
         client = get_api_client()
 
         payload = {"length": 200, "offset": 0, "filter": "state!=DELETED;type!=nutanix"}
-        res, err = client.account.list(payload)
-        if err:
-            raise Exception("[{}] - {}".format(err["code"], err["error"]))
+        account_uuid_type_map = client.account.get_uuid_type_map(payload)
 
-        res = res.json()
-        account_uuid_type_map = {}
-        local_nutanix_pc_account_uuid = ""
-        for entity in res["entities"]:
-            a_uuid = entity["metadata"]["uuid"]
-            a_type = entity["status"]["resources"]["type"]
-            account_uuid_type_map[a_uuid] = a_type
-            if a_type == "nutanix_pc" and entity["status"]["resources"]["data"].get(
-                "host_pc", False
-            ):
-                local_nutanix_pc_account_uuid = a_uuid
-
-        Obj = get_resource_api("projects", client.connection)
-        res, err = Obj.list({"length": 1000})
+        # store subnets for nutanix_pc accounts in a map; otherwise we would have to
+        # call the subnets api for each project (very slow with ~1000 projects)
+        ntnx_pc_account_subnet_map = dict()
+        for _acct_uuid in account_uuid_type_map.keys():
+            if account_uuid_type_map[_acct_uuid] == "nutanix_pc":
+                ntnx_pc_account_subnet_map[_acct_uuid] = list()
+
+        # Get the subnets for each nutanix_pc account
+        AhvVmProvider = cls.get_provider_plugin("AHV_VM")
+        AhvObj = AhvVmProvider.get_api_obj()
+        for acct_uuid in ntnx_pc_account_subnet_map.keys():
+            LOG.debug(
+                "Fetching subnets for nutanix_pc account_uuid {}".format(acct_uuid)
+            )
+            try:
+                res = AhvObj.subnets(account_uuid=acct_uuid)
+            except Exception as exp:
+                LOG.exception(exp)
+                LOG.warning(
+                    "Unable to fetch subnets for Nutanix_PC Account(uuid={})".format(
+                        acct_uuid
+                    )
+                )
+                continue
+
+            for row in res["entities"]: 
+ ntnx_pc_account_subnet_map[acct_uuid].append(row["metadata"]["uuid"]) + + # Getting projects data + res_entities, err = client.project.list_all(ignore_error=True) if err: - raise Exception("[{}] - {}".format(err["code"], err["error"])) + LOG.exception(err) - res = res.json() - for entity in res["entities"]: + for entity in res_entities: # populating a map to lookup the account to which a subnet belongs - subnet_to_account_map = dict() + whitelisted_subnets = dict() name = entity["status"]["name"] uuid = entity["metadata"]["uuid"] @@ -670,13 +739,12 @@ def sync(cls): account_list = entity["status"]["resources"].get( "account_reference_list", [] ) - subnets_ref_list = entity["status"]["resources"].get( - "subnet_reference_list", [] - ) - # Use spec dict in entity-payload for external subnets - external_subnets_ref_list = ( - entity["spec"].get("resources", {}).get("external_network_list", []) - ) + + project_subnets_ref_list = entity["spec"].get("resources", {}).get( + "external_network_list", [] + ) + entity["spec"].get("resources", {}).get("subnet_reference_list", []) + project_subnet_uuids = [item["uuid"] for item in project_subnets_ref_list] + account_map = {} for account in account_list: account_uuid = account["uuid"] @@ -693,48 +761,12 @@ def sync(cls): # for PC accounts add subnets to subnet_to_account_map. Will use it to populate whitelisted_subnets if account_type == "nutanix_pc": - project_network_list = ( - subnets_ref_list - if account_uuid == local_nutanix_pc_account_uuid - else external_subnets_ref_list + whitelisted_subnets[account_uuid] = list( + set(project_subnet_uuids) + & set(ntnx_pc_account_subnet_map[account_uuid]) ) - subnet_uuids = [item["uuid"] for item in project_network_list] - if not subnet_uuids: - continue - - AhvVmProvider = cls.get_provider_plugin("AHV_VM") - AhvObj = AhvVmProvider.get_api_obj() - - filter_query = "_entity_id_=={}".format("|".join(subnet_uuids)) - LOG.debug( - "fetching following subnets {} for nutanix_pc account_uuid {}".format( - subnet_uuids, account_uuid - ) - ) - try: - res = AhvObj.subnets( - account_uuid=account_uuid, filter_query=filter_query - ) - except Exception: - LOG.warning( - "Unable to fetch subnets for Nutanix_PC Account(uuid={})".format( - account_uuid - ) - ) - continue - for row in res["entities"]: - subnet_to_account_map[row["metadata"]["uuid"]] = account_uuid accounts_data = json.dumps(account_map) - - subnets_ref_list = entity["status"]["resources"]["subnet_reference_list"] - - whitelisted_subnets = dict() - for subnet_uuid, account_uuid in subnet_to_account_map.items(): - if not whitelisted_subnets.get(account_uuid): - whitelisted_subnets[account_uuid] = [] - whitelisted_subnets[account_uuid].append(subnet_uuid) - whitelisted_subnets = json.dumps(whitelisted_subnets) cls.create_entry( name=name, @@ -773,6 +805,109 @@ def get_entity_data_using_uuid(cls, uuid, **kwargs): except DoesNotExist: return dict() + @classmethod + def fetch_one(cls, uuid): + """returns project data for project uuid""" + + # update by latest data + client = get_api_client() + + payload = {"length": 200, "offset": 0, "filter": "state!=DELETED;type!=nutanix"} + account_uuid_type_map = client.account.get_uuid_type_map(payload) + + res, err = client.project.read(uuid) + if err: + LOG.exception("[{}] - {}".format(err["code"], err["error"])) + return {} + + project_data = res.json() + project_name = project_data["spec"]["name"] + account_list = project_data["spec"]["resources"].get( + "account_reference_list", [] + ) + project_subnets_ref_list = 
project_data["spec"].get("resources", {}).get( + "external_network_list", [] + ) + project_data["spec"].get("resources", {}).get("subnet_reference_list", []) + project_subnet_uuids = [item["uuid"] for item in project_subnets_ref_list] + + # populating a map to lookup the account to which a subnet belongs + whitelisted_subnets = dict() + account_map = dict() + for _acc in account_list: + account_uuid = _acc["uuid"] + + # As projects may have deleted accounts registered + if account_uuid not in account_uuid_type_map: + continue + account_type = account_uuid_type_map[account_uuid] + if account_type not in account_map: + account_map[account_type] = [account_uuid] + else: + account_map[account_type].append(account_uuid) + + if account_type == "nutanix_pc": + AhvVmProvider = cls.get_provider_plugin("AHV_VM") + AhvObj = AhvVmProvider.get_api_obj() + + filter_query = "_entity_id_=={}".format("|".join(project_subnet_uuids)) + LOG.debug( + "fetching following subnets {} for nutanix_pc account_uuid {}".format( + project_subnet_uuids, account_uuid + ) + ) + try: + res = AhvObj.subnets( + account_uuid=account_uuid, filter_query=filter_query + ) + except Exception: + LOG.warning( + "Unable to fetch subnets for Nutanix_PC Account(uuid={})".format( + account_uuid + ) + ) + continue + + whitelisted_subnets[account_uuid] = [ + row["metadata"]["uuid"] for row in res["entities"] + ] + + accounts_data = json.dumps(account_map) + whitelisted_subnets = json.dumps(whitelisted_subnets) + + return { + "name": project_name, + "uuid": uuid, + "accounts_data": accounts_data, + "whitelisted_subnets": whitelisted_subnets, + } + + @classmethod + def add_one(cls, uuid, **kwargs): + """adds one entry to project table""" + + db_data = cls.fetch_one(uuid, **kwargs) + cls.create_entry(**db_data) + + @classmethod + def delete_one(cls, uuid, **kwargs): + """deletes one entity from project""" + + obj = cls.get(cls.uuid == uuid) + obj.delete_instance() + + @classmethod + def update_one(cls, uuid, **kwargs): + """updates single entry to project table""" + + db_data = cls.fetch_one(uuid, **kwargs) + q = cls.update( + { + cls.accounts_data: db_data["accounts_data"], + cls.whitelisted_subnets: db_data["whitelisted_subnets"], + } + ).where(cls.uuid == uuid) + q.execute() + class Meta: database = dsl_database primary_key = CompositeKey("name", "uuid") @@ -907,6 +1042,74 @@ def get_entity_data_using_uuid(cls, uuid, **kwargs): except DoesNotExist: return dict() + @classmethod + def fetch_one(cls, uuid): + """fetches one entity data""" + + client = get_api_client() + res, err = client.environment.read(uuid) + if err: + LOG.exception("[{}] - {}".format(err["code"], err["error"])) + return {} + + entity = res.json() + env_name = entity["status"]["name"] + project_uuid = entity["metadata"].get("project_reference", {}).get("uuid", "") + infra_inclusion_list = entity["status"]["resources"].get( + "infra_inclusion_list", [] + ) + account_map = {} + for infra in infra_inclusion_list: + _account_type = infra["type"] + _account_data = dict( + uuid=infra["account_reference"]["uuid"], + name=infra["account_reference"].get("name", ""), + ) + + if _account_type == "nutanix_pc": + subnet_refs = infra.get("subnet_references", []) + _account_data["subnet_uuids"] = [row["uuid"] for row in subnet_refs] + + if not account_map.get(_account_type): + account_map[_account_type] = [] + + account_map[_account_type].append(_account_data) + + accounts_data = json.dumps(account_map) + return { + "name": env_name, + "uuid": uuid, + "accounts_data": accounts_data, + 
"project_uuid": project_uuid, + } + + @classmethod + def add_one(cls, uuid, **kwargs): + """adds one entry to env table""" + + db_data = cls.fetch_one(uuid, **kwargs) + cls.create_entry(**db_data) + + @classmethod + def delete_one(cls, uuid, **kwargs): + """deletes one entity from env table""" + + obj = cls.get(cls.uuid == uuid) + obj.delete_instance() + + @classmethod + def update_one(cls, uuid, **kwargs): + """updates single entry to env table""" + + db_data = cls.fetch_one(uuid, **kwargs) + q = cls.update( + { + cls.accounts_data: db_data["accounts_data"], + cls.project_uuid: db_data["project_uuid"], + } + ).where(cls.uuid == uuid) + q.execute() + class Meta: database = dsl_database primary_key = CompositeKey("name", "uuid") @@ -1045,6 +1248,62 @@ def get_entity_data_using_uuid(cls, uuid, **kwargs): except DoesNotExist: return dict() + @classmethod + def fetch_one(cls, uuid): + """fetches one entity data""" + + client = get_api_client() + res, err = client.user.read(uuid) + if err: + LOG.exception("[{}] - {}".format(err["code"], err["error"])) + return {} + + entity = res.json() + name = entity["status"]["name"] + display_name = entity["status"]["resources"].get("display_name") or "" + directory_service_user = ( + entity["status"]["resources"].get("directory_service_user") or dict() + ) + directory_service_ref = ( + directory_service_user.get("directory_service_reference") or dict() + ) + directory_service_name = directory_service_ref.get("name", "LOCAL") + + return { + "name": name, + "uuid": uuid, + "display_name": display_name, + "directory": directory_service_name, + } + + @classmethod + def add_one(cls, uuid, **kwargs): + """adds one entry to env table""" + + db_data = cls.fetch_one(uuid, **kwargs) + cls.create_entry(**db_data) + + @classmethod + def delete_one(cls, uuid, **kwargs): + """deletes one entity from env table""" + + obj = cls.get(cls.uuid == uuid) + obj.delete_instance() + + @classmethod + def update_one(cls, uuid, **kwargs): + """updates single entry to env table""" + + db_data = cls.fetch_one(uuid, **kwargs) + q = cls.update( + { + cls.name: db_data["name"], + cls.display_name: db_data["display_name"], + cls.directory: db_data["directory"], + } + ).where(cls.uuid == uuid) + q.execute() + class Meta: database = dsl_database primary_key = CompositeKey("name", "uuid") @@ -1137,6 +1396,35 @@ def get_entity_data_using_uuid(cls, uuid, **kwargs): except DoesNotExist: return dict() + @classmethod + def fetch_one(cls, uuid): + """fetches one entity data""" + + client = get_api_client() + res, err = client.role.read(uuid) + if err: + LOG.exception("[{}] - {}".format(err["code"], err["error"])) + return {} + + entity = res.json() + name = entity["status"]["name"] + + return {"name": name, "uuid": uuid} + + @classmethod + def add_one(cls, uuid, **kwargs): + """adds one entry to env table""" + + db_data = cls.fetch_one(uuid, **kwargs) + cls.create_entry(**db_data) + + @classmethod + def delete_one(cls, uuid, **kwargs): + """deletes one entity from env table""" + + obj = cls.get(cls.uuid == uuid) + obj.delete_instance() + class Meta: database = dsl_database primary_key = CompositeKey("name", "uuid") @@ -1376,6 +1664,67 @@ def get_entity_data_using_uuid(cls, uuid, **kwargs): except DoesNotExist: return dict() + @classmethod + def fetch_one(cls, uuid): + """fetches one entity data""" + + client = get_api_client() + res, err = client.group.read(uuid) + if err: + LOG.exception("[{}] - {}".format(err["code"], err["error"])) + return {} + + entity = res.json() + e_resources = 
entity["status"]["resources"] + + directory_service_user_group = ( + e_resources.get("directory_service_user_group") or dict() + ) + distinguished_name = directory_service_user_group.get("distinguished_name") + + directory_service_ref = ( + directory_service_user_group.get("directory_service_reference") or dict() + ) + directory_service_name = directory_service_ref.get("name", "") + + display_name = e_resources.get("display_name", "") + uuid = entity["metadata"]["uuid"] + + return { + "name": distinguished_name, + "uuid": uuid, + "display_name": display_name, + "directory": directory_service_name, + } + + @classmethod + def add_one(cls, uuid, **kwargs): + """adds one entry to env table""" + + db_data = cls.fetch_one(uuid, **kwargs) + cls.create_entry(**db_data) + + @classmethod + def delete_one(cls, uuid, **kwargs): + """deletes one entity from env table""" + + obj = cls.get(cls.uuid == uuid) + obj.delete_instance() + + @classmethod + def update_one(cls, uuid, **kwargs): + """updates single entry to env table""" + + db_data = cls.fetch_one(uuid, **kwargs) + q = cls.update( + { + cls.name: db_data["name"], + cls.display_name: db_data["display_name"], + cls.directory: db_data["directory"], + } + ).where(cls.uuid == uuid) + q.execute() + class Meta: database = dsl_database primary_key = CompositeKey("name", "uuid") diff --git a/calm/dsl/store/cache.py b/calm/dsl/store/cache.py index 64442b5e1..a23d36168 100644 --- a/calm/dsl/store/cache.py +++ b/calm/dsl/store/cache.py @@ -53,15 +53,7 @@ def get_cache_tables(cls, sync_version=False): def get_entity_data(cls, entity_type, name, **kwargs): """returns entity data corresponding to supplied entry using entity name""" - cache_tables = cls.get_cache_tables() - if not entity_type: - LOG.error("No entity type for cache supplied") - sys.exit(-1) - - db_cls = cache_tables.get(entity_type, None) - if not db_cls: - LOG.error("Unknown entity type ({}) supplied".format(entity_type)) - sys.exit(-1) + db_cls = cls.get_entity_db_table_object(entity_type) try: res = db_cls.get_entity_data(name=name, **kwargs) @@ -87,15 +79,7 @@ def get_entity_data(cls, entity_type, name, **kwargs): def get_entity_data_using_uuid(cls, entity_type, uuid, *args, **kwargs): """returns entity data corresponding to supplied entry using entity uuid""" - cache_tables = cls.get_cache_tables() - if not entity_type: - LOG.error("No entity type for cache supplied") - sys.exit(-1) - - db_cls = cache_tables.get(entity_type, None) - if not db_cls: - LOG.error("Unknown entity type ({}) supplied".format(entity_type)) - sys.exit(-1) + db_cls = cls.get_entity_db_table_object(entity_type) try: res = db_cls.get_entity_data_using_uuid(uuid=uuid, **kwargs) @@ -117,6 +101,43 @@ def get_entity_data_using_uuid(cls, entity_type, uuid, *args, **kwargs): return res + @classmethod + def get_entity_db_table_object(cls, entity_type): + """returns database entity table object corresponding to entity""" + + if not entity_type: + LOG.error("No entity type for cache supplied") + sys.exit(-1) + + cache_tables = cls.get_cache_tables() + db_cls = cache_tables.get(entity_type, None) + if not db_cls: + LOG.error("Unknown entity type ({}) supplied".format(entity_type)) + sys.exit(-1) + + return db_cls + + @classmethod + def add_one(cls, entity_type, uuid, **kwargs): + """adds one entity to entity db object""" + + db_obj = cls.get_entity_db_table_object(entity_type) + db_obj.add_one(uuid, **kwargs) + + @classmethod + def delete_one(cls, entity_type, uuid, **kwargs): + """adds one entity to entity db object""" + + db_obj 
= cls.get_entity_db_table_object(entity_type)
+        db_obj.delete_one(uuid, **kwargs)
+
+    @classmethod
+    def update_one(cls, entity_type, uuid, **kwargs):
+        """updates one entity in entity db object"""
+
+        db_obj = cls.get_entity_db_table_object(entity_type)
+        db_obj.update_one(uuid, **kwargs)
+
     @classmethod
     def sync(cls):
         """Sync cache by latest data"""
@@ -186,3 +207,21 @@ def show_data(cls):
                 "Cache error occurred. Please update cache using 'calm update cache' command"
             )
             sys.exit(-1)
+
+    @classmethod
+    def show_table(cls, cache_type):
+        """shows the cache table(s) provided in cache_type"""
+
+        if not cache_type:
+            return
+
+        cache_type = [cache_type] if not isinstance(cache_type, list) else cache_type
+        cache_table_map = cls.get_cache_tables()
+
+        for _ct in cache_type:
+            if _ct not in cache_table_map:
+                LOG.warning("Invalid cache_type ('{}') provided".format(_ct))
+                continue
+
+            cache_table = cache_table_map[_ct]
+            cache_table.show_data()
 
diff --git a/release-notes/3.4.0/README.md b/release-notes/3.4.0/README.md
index 7fc0d6593..57dc52a2d 100644
--- a/release-notes/3.4.0/README.md
+++ b/release-notes/3.4.0/README.md
@@ -93,4 +93,8 @@
     )
 ]
 
+# Cache
+- Allow cache update for individual entities. CLI command: `calm update cache --entity [ahv_subnet|ahv_disk_image|account|project|environment|user|role|directory_service|user_group|ahv_network_function_chain|app_protection_policy]`
+- Fixed issue [#184](https://github.com/nutanix/calm-dsl/issues/184). Allow atomic updates in the cache for entity CRUD operations, e.g. project/environment create/update/delete.
+
 *** Please look for Calm 3.4.0 overview [here](https://drive.google.com/file/d/1oITux-OVntRQ3eh8v2h-dRfCEnqpxcOl/view?usp=sharing)
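
---

Editor's note: beyond the `calm update cache --entity <type>` CLI path above, the atomic helpers this patch adds to `calm.dsl.store.Cache` can also be driven programmatically. The following is a minimal sketch, not part of the patch itself; it assumes a configured calm-dsl installation with an initialized local cache DB, and the `"<project-uuid>"` / `"<env-uuid>"` values are hypothetical placeholders for real entity UUIDs.

```python
from calm.dsl.store import Cache
from calm.dsl.constants import CACHE

# Refresh a single project entry in place instead of re-syncing every table
project_uuid = "<project-uuid>"  # hypothetical placeholder UUID
Cache.update_one(entity_type=CACHE.ENTITY.PROJECT, uuid=project_uuid)

# Drop a deleted environment from the cache without touching other rows
Cache.delete_one(entity_type=CACHE.ENTITY.ENVIRONMENT, uuid="<env-uuid>")

# Or re-sync and display one full table, which is what
# `calm update cache --entity project` does under the hood
Cache.sync_table(CACHE.ENTITY.PROJECT)
Cache.show_table(CACHE.ENTITY.PROJECT)
```

Each `*_one` helper resolves the entity type to its table via `get_entity_db_table_object()` and then performs a single-row fetch/insert/update/delete, which is what makes these updates cheap compared to a full `Cache.sync()`.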