Skip to content

Commit

Permalink
Perform Cache Atomic updates for entity crud operations (#223)
Browse files Browse the repository at this point in the history
* Add basic configuration for atomic updates

* minor fix for subnet query

* fix atomic operations for environment

* adding atomic ops for users and group, roles cache

* Adding changes for cache object

* Improving projects update cache

* Plugging atomic operations for users, groups, projects env for commands

* Fixing update_one helper

* Add calm update cache for individual entity

* Added cache changes in readme

* Fixing get_cache_table_types helper

* Correcting comment
  • Loading branch information
abhijeetkaurav1st authored Mar 24, 2022
1 parent c1a3a6b commit ae470ec
Show file tree
Hide file tree
Showing 10 changed files with 584 additions and 151 deletions.
1 change: 1 addition & 0 deletions calm/dsl/api/resource.py
Original file line number Diff line number Diff line change
Expand Up @@ -98,6 +98,7 @@ def get_uuid_name_map(self, params={}):

return uuid_name_map

# TODO: Fix return type of list_all helper
def list_all(self, api_limit=250, base_params=None, ignore_error=False):
"""returns the list of entities"""

Expand Down
11 changes: 3 additions & 8 deletions calm/dsl/api/setting.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,17 +29,12 @@ def vms_list(self, id, params=dict()):
def get_uuid_type_map(self, params=dict()):
"""returns map containing {account_uuid: account_type} details"""

response, err = self.list(params)
res_entities, err = self.list_all(base_params=params, ignore_error=True)
if err:
raise Exception("[{}] - {}".format(err["code"], err["error"]))

response = response.json()
total_matches = response["metadata"]["total_matches"]
if total_matches == 0:
return {}
raise Exception(err)

uuid_type_map = {}
for entity in response["entities"]:
for entity in res_entities:
a_uuid = entity["metadata"]["uuid"]
a_type = entity["status"]["resources"]["type"]
uuid_type_map[a_uuid] = a_type
Expand Down
32 changes: 29 additions & 3 deletions calm/dsl/cli/cache_commands.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
import datetime
import click

from calm.dsl.store import Cache
from calm.dsl.constants import CACHE

from .main import show, update, clear
from .utils import highlight_text
Expand All @@ -9,6 +11,19 @@
LOG = get_logging_handle(__name__)


def get_cache_table_types():
    """Return the list of supported cache table types."""

    # NOTE: avoid Cache.get_cache_tables().keys() here — reaching into Cache
    # during container initialization would create a cyclic dependency.
    return [
        getattr(CACHE.ENTITY, attr)
        for attr in CACHE.ENTITY.__dict__
        if not attr.startswith("__")
    ]


@show.command("cache")
def show_cache_command():
"""Display the cache data"""
Expand All @@ -25,9 +40,20 @@ def clear_cache():


@update.command("cache")
@click.option(
    "--entity",
    "-e",
    default=None,
    help="Cache entity, if not given will update whole cache",
    type=click.Choice(get_cache_table_types()),
)
def update_cache(entity):
    """Update the data for dynamic entities stored in the cache"""

    if not entity:
        # No entity supplied: refresh and display the entire cache
        Cache.sync()
        Cache.show_data()
    else:
        # Refresh and display only the requested cache table
        Cache.sync_table(entity)
        Cache.show_table(entity)
    LOG.info(highlight_text("Cache updated at {}".format(datetime.datetime.now())))
23 changes: 11 additions & 12 deletions calm/dsl/cli/environments.py
Original file line number Diff line number Diff line change
Expand Up @@ -242,11 +242,12 @@ def create_environment_from_dsl_file(
click.echo(json.dumps(env_std_out, indent=4, separators=(",", ": ")))

if no_cache_update:
LOG.info("skipping environments and projects cache update")
LOG.info("skipping environments cache update")
else:
LOG.info("Updating projects and environments cache ...")
Cache.sync_table(cache_type=CACHE.ENTITY.PROJECT)
Cache.sync_table(cache_type=CACHE.ENTITY.ENVIRONMENT)
LOG.info("Updating environments cache ...")
Cache.add_one(
entity_type=CACHE.ENTITY.ENVIRONMENT, uuid=env_std_out.get("uuid")
)
LOG.info("[Done]")


Expand Down Expand Up @@ -314,11 +315,10 @@ def update_environment_from_dsl_file(
click.echo(json.dumps(stdout_dict, indent=4, separators=(",", ": ")))

if no_cache_update:
LOG.info("skipping environments and projects cache update")
LOG.info("skipping environments cache update")
else:
LOG.info("Updating projects and environments cache ...")
Cache.sync_table(cache_type=CACHE.ENTITY.PROJECT)
Cache.sync_table(cache_type=CACHE.ENTITY.ENVIRONMENT)
LOG.info("Updating environments cache ...")
Cache.update_one(entity_type=CACHE.ENTITY.ENVIRONMENT, uuid=environment_id)
LOG.info("[Done]")


Expand Down Expand Up @@ -504,9 +504,8 @@ def delete_environment(environment_name, project_name, no_cache_update=False):
update_project_envs(project_name, remove_env_uuids=[environment_id])

if no_cache_update:
LOG.info("skipping environments and projects cache update")
LOG.info("skipping environments cache update")
else:
LOG.info("Updating environments and projects cache ...")
Cache.sync_table(cache_type=CACHE.ENTITY.PROJECT)
Cache.sync_table(cache_type=CACHE.ENTITY.ENVIRONMENT)
LOG.info("Updating environments cache ...")
Cache.delete_one(entity_type=CACHE.ENTITY.ENVIRONMENT, uuid=environment_id)
LOG.info("[Done]")
16 changes: 11 additions & 5 deletions calm/dsl/cli/groups.py
Original file line number Diff line number Diff line change
Expand Up @@ -133,6 +133,7 @@ def create_group(name):
}
click.echo(json.dumps(stdout_dict, indent=4, separators=(",", ": ")))

user_group_uuid = res["metadata"]["uuid"]
LOG.info("Polling on user-group creation task")
task_state = watch_task(
res["status"]["execution_context"]["task_uuid"], poll_interval=5
Expand All @@ -143,7 +144,7 @@ def create_group(name):

# Update user-groups in cache
LOG.info("Updating user-groups cache ...")
Cache.sync_table(cache_type=CACHE.ENTITY.USER_GROUP)
Cache.add_one(entity_type=CACHE.ENTITY.USER_GROUP, uuid=user_group_uuid)
LOG.info("[Done]")


Expand All @@ -152,12 +153,15 @@ def delete_group(group_names):

client = get_api_client()

deleted_group_uuids = []
for name in group_names:
group_ref = Ref.Group(name)
res, err = client.group.delete(group_ref["uuid"])
if err:
raise Exception("[{}] - {}".format(err["code"], err["error"]))
LOG.exception("[{}] - {}".format(err["code"], err["error"]))
sys.exit(-1)

deleted_group_uuids.append(group_ref["uuid"])
LOG.info("Polling on user-group deletion task")
res = res.json()
task_state = watch_task(
Expand All @@ -170,6 +174,8 @@ def delete_group(group_names):
sys.exit(-1)

# Update user-groups in cache
LOG.info("Updating user-groups cache ...")
Cache.sync_table(cache_type=CACHE.ENTITY.USER_GROUP)
LOG.info("[Done]")
if deleted_group_uuids:
LOG.info("Updating user-groups cache ...")
for _group_uuid in deleted_group_uuids:
Cache.delete_one(entity_type=CACHE.ENTITY.USER_GROUP, uuid=_group_uuid)
LOG.info("[Done]")
56 changes: 32 additions & 24 deletions calm/dsl/cli/projects.py
Original file line number Diff line number Diff line change
Expand Up @@ -378,11 +378,12 @@ def create_project_from_dsl(
project_name = project_data["name"]
project_uuid = project_data["uuid"]

# Update project in cache
LOG.info("Updating projects cache")
Cache.add_one(entity_type=CACHE.ENTITY.PROJECT, uuid=project_uuid)
LOG.info("[Done]")

if envs:
# Update project in cache
LOG.info("Updating projects cache")
Cache.sync_table("project")
LOG.info("[Done]")

# As ahv helpers in environment should use account from project accounts
# updating the context
Expand Down Expand Up @@ -428,14 +429,16 @@ def create_project_from_dsl(
# Reset the context changes
ContextObj.reset_configuration()

if no_cache_update:
LOG.info("skipping projects and environments cache update")
else:
# Update projects in cache
LOG.info("Updating projects and environments cache ...")
Cache.sync_table(cache_type=CACHE.ENTITY.PROJECT)
Cache.sync_table(cache_type=CACHE.ENTITY.ENVIRONMENT)
LOG.info("[Done]")
if no_cache_update:
LOG.info("Skipping environments cache update")
else:
# Update environments in cache
LOG.info("Updating environments cache ...")
for _e_item in env_ref_list:
Cache.add_one(
entity_type=CACHE.ENTITY.ENVIRONMENT, uuid=_e_item["uuid"]
)
LOG.info("[Done]")


def describe_project(project_name, out):
Expand Down Expand Up @@ -568,20 +571,21 @@ def describe_project(project_name, out):
def delete_project(project_names, no_cache_update=False):

client = get_api_client()
params = {"length": 1000}
project_name_uuid_map = client.project.get_name_uuid_map(params)
projects_deleted = False
project_name_uuid_map = client.project.get_name_uuid_map()
deleted_projects_uuids = []
for project_name in project_names:
project_id = project_name_uuid_map.get(project_name, "")
if not project_id:
LOG.warning("Project {} not found.".format(project_name))
continue

projects_deleted = True
LOG.info("Deleting project '{}'".format(project_name))
res, err = client.project.delete(project_id)
if err:
raise Exception("[{}] - {}".format(err["code"], err["error"]))
LOG.exception("[{}] - {}".format(err["code"], err["error"]))
continue

deleted_projects_uuids.append(project_id)

LOG.info("Polling on project deletion task")
res = res.json()
Expand All @@ -593,13 +597,13 @@ def delete_project(project_names, no_cache_update=False):
sys.exit(-1)

# Update projects in cache if any project has been deleted
if projects_deleted:
if deleted_projects_uuids:
if no_cache_update:
LOG.info("skipping projects and environment cache update")
LOG.info("skipping projects cache update")
else:
LOG.info("Updating projects and environment cache ...")
Cache.sync_table(cache_type=CACHE.ENTITY.PROJECT)
Cache.sync_table(cache_type=CACHE.ENTITY.ENVIRONMENT)
LOG.info("Updating projects cache ...")
for _proj_id in deleted_projects_uuids:
Cache.delete_one(entity_type=CACHE.ENTITY.PROJECT, uuid=_proj_id)
LOG.info("[Done]")


Expand Down Expand Up @@ -790,10 +794,10 @@ def update_project_from_dsl(project_name, project_file, no_cache_update=False):
sys.exit(-1)

if no_cache_update:
LOG.info("skipping projects cache update")
LOG.info("Skipping projects cache update")
else:
LOG.info("Updating projects cache ...")
Cache.sync_table(cache_type=CACHE.ENTITY.PROJECT)
Cache.update_one(entity_type=CACHE.ENTITY.PROJECT, uuid=project_uuid)
LOG.info("[Done]")


Expand Down Expand Up @@ -992,6 +996,10 @@ def update_project_using_cli_switches(
LOG.exception("Project updation task went to {} state".format(task_state))
sys.exit(-1)

LOG.info("Updating projects cache ...")
Cache.update_one(entity_type=CACHE.ENTITY.PROJECT, uuid=project_uuid)
LOG.info("[Done]")


def remove_users_from_project_acps(project_uuid, remove_user_list, remove_group_list):

Expand Down
16 changes: 11 additions & 5 deletions calm/dsl/cli/users.py
Original file line number Diff line number Diff line change
Expand Up @@ -127,6 +127,7 @@ def create_user(name, directory_service):
}
click.echo(json.dumps(stdout_dict, indent=4, separators=(",", ": ")))

user_uuid = res["metadata"]["uuid"]
LOG.info("Polling on user creation task")
task_state = watch_task(
res["status"]["execution_context"]["task_uuid"], poll_interval=5
Expand All @@ -137,7 +138,7 @@ def create_user(name, directory_service):

# Update users in cache
LOG.info("Updating users cache ...")
Cache.sync_table(cache_type=CACHE.ENTITY.USER)
Cache.add_one(entity_type=CACHE.ENTITY.USER, uuid=user_uuid)
LOG.info("[Done]")


Expand All @@ -147,6 +148,7 @@ def delete_user(user_names):
params = {"length": 1000}
user_name_uuid_map = client.user.get_name_uuid_map(params)

deleted_user_uuids = []
for name in user_names:
user_uuid = user_name_uuid_map.get(name, "")
if not user_uuid:
Expand All @@ -155,8 +157,10 @@ def delete_user(user_names):

res, err = client.user.delete(user_uuid)
if err:
raise Exception("[{}] - {}".format(err["code"], err["error"]))
LOG.exception("[{}] - {}".format(err["code"], err["error"]))
sys.exit(-1)

deleted_user_uuids.append(user_uuid)
LOG.info("Polling on user deletion task")
res = res.json()
task_state = watch_task(
Expand All @@ -167,6 +171,8 @@ def delete_user(user_names):
sys.exit(-1)

# Update users in cache
LOG.info("Updating users cache ...")
Cache.sync_table(cache_type=CACHE.ENTITY.USER)
LOG.info("[Done]")
if deleted_user_uuids:
LOG.info("Updating users cache ...")
for _user_uuid in deleted_user_uuids:
Cache.delete_one(entity_type=CACHE.ENTITY.USER, uuid=_user_uuid)
LOG.info("[Done]")
Loading

0 comments on commit ae470ec

Please sign in to comment.