Create cache hash after command #64

Merged
28 changes: 24 additions & 4 deletions test/test_cache.py
@@ -70,10 +70,6 @@ def test_use_cache(tf):
),
], indirect=True)
def test_no_use_cache(tf):
"""
Ensures cache is not used and runs the execute_command() for every call of
the method
"""
expected_call_count = 2
for method in cache_methods:
with patch.object(tf, 'execute_command', wraps=tf.execute_command) as mock_execute_command:
@@ -84,6 +80,10 @@ def test_no_use_cache(tf):

@pytest.mark.parametrize("tf", [True], indirect=True)
def test_use_cache_with_same_tf_var_file(tf, tmp_path):
"""
Ensures cache is used if the same tf_var_file argument is passed
within subsequent method calls
"""
tf_var_file_methods = ["plan", "apply", "destroy"]

tf_vars_file = tmp_path / (str(uuid.uuid4()) + '.json')
@@ -99,6 +99,10 @@ def test_use_cache_with_same_tf_var_file(tf, tmp_path):

@pytest.mark.parametrize("tf", [True], indirect=True)
def test_use_cache_with_new_tf_var_file(tf, tmp_path):
"""
Ensures cache is not used if a different tf_var_file argument is passed
within subsequent method calls
"""
tf_var_file_methods = ["plan", "apply", "destroy"]
expected_call_count = 2

@@ -116,6 +120,10 @@ def test_use_cache_with_new_tf_var_file(tf, tmp_path):

@pytest.mark.parametrize("tf", [True], indirect=True)
def test_use_cache_with_new_extra_files(tf, tmp_path):
"""
Ensures cache is not used if a different extra_files argument is passed
within subsequent method calls
"""
expected_call_count = 2
tf_vars_file = tmp_path / (str(uuid.uuid4()) + '.json')
tf_vars_file.write_text(json.dumps({"foo": "old"}))
@@ -130,6 +138,10 @@ def test_use_cache_with_new_extra_files(tf, tmp_path):

@pytest.mark.parametrize("tf", [True], indirect=True)
def test_use_cache_with_same_extra_files(tf, tmp_path):
"""
Ensures cache is used if the same extra_files argument is passed
within subsequent method calls
"""
tf_vars_file = tmp_path / (str(uuid.uuid4()) + '.json')
tf_vars_file.write_text(json.dumps({"foo": "old"}))

@@ -142,6 +154,10 @@ def test_use_cache_with_same_extra_files(tf, tmp_path):

@pytest.mark.parametrize("tf", [True], indirect=True)
def test_use_cache_with_new_env(tf):
"""
Ensures cache is not used if the env attribute is updated
before subsequent method calls
"""
expected_call_count = 2
for method in cache_methods:
with patch.object(tf, 'execute_command', wraps=tf.execute_command) as mock_execute_command:
@@ -167,6 +183,10 @@ def dummy_tf_filepath(tf):

@pytest.mark.parametrize("tf", [True], indirect=True)
def test_use_cache_with_new_tf_content(tf, dummy_tf_filepath):
"""
Ensures cache is not used if the tfdir directory is updated
before subsequent method calls
"""
expected_call_count = 2
for method in cache_methods:
with patch.object(tf, 'execute_command', wraps=tf.execute_command) as mock_execute_command:
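Each of the tests above follows the same pattern: wrap execute_command with unittest.mock.patch.object(..., wraps=...) so the real command still runs while invocations are counted, then assert on call_count. One underlying call means the repeated method call was served from cache; two calls mean the cache key changed and the command re-ran. A minimal standalone sketch of that pattern, assuming a TerraformTest-style object tf with caching already enabled (as the parametrized fixture provides):

from unittest.mock import patch

def assert_second_call_cached(tf, expect_cached):
    # Wrap execute_command so the real call still happens while invocations are counted.
    with patch.object(tf, "execute_command", wraps=tf.execute_command) as mock_execute_command:
        tf.plan()
        tf.plan()
    # One underlying call: the second plan() was served from cache.
    # Two calls: the cache key changed, so the command re-ran.
    assert mock_execute_command.call_count == (1 if expect_cached else 2)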
86 changes: 50 additions & 36 deletions tftest.py
@@ -375,21 +375,58 @@ def _abspath(self, path):
"""Make relative path absolute from base dir."""
return path if os.path.isabs(path) else os.path.join(self._basedir, path)

def _dirhash(self, directory, hash, ignore_hidden=False, excluded_extensions=[]):
def _dirhash(self, directory, hash, ignore_hidden=False, exclude_directories=[], excluded_extensions=[]):
"""Returns hash of directory's file contents"""
assert Path(directory).is_dir()
for path in sorted(Path(directory).iterdir(), key=lambda p: str(p).lower()):
try:
dir_iter = sorted(Path(directory).iterdir(),
key=lambda p: str(p).lower())
except FileNotFoundError:
return hash
for path in dir_iter:
if path.is_file():
if not ignore_hidden and path.name.startswith("."):
if ignore_hidden and path.name.startswith("."):
continue
if path.suffix in excluded_extensions:
continue
with open(path, "rb") as f:
for chunk in iter(lambda: f.read(4096), b""):
hash.update(chunk)
elif path.is_dir():
hash = self._dirhash(path, hash, ignore_hidden=ignore_hidden)
elif path.is_dir() and path.name not in exclude_directories:
hash = self._dirhash(path, hash, ignore_hidden=ignore_hidden,
exclude_directories=exclude_directories, excluded_extensions=excluded_extensions)
return hash

def generate_cache_hash(self, method_kwargs):
"""Returns a hash value using the instance's attributes and method keyword arguments"""
params = {
**{
k: v for k, v in self.__dict__.items()
# only uses instance attributes that are involved in the results of
# the decorated method
if k in ["binary", "_basedir", "tfdir", "_env"]
},
**method_kwargs,
}

# creates hash of file contents
for path_param in ["extra_files", "tf_var_file"]:
if path_param in method_kwargs:
if isinstance(method_kwargs[path_param], list):
params[path_param] = [
sha1(open(fp, 'rb').read()).hexdigest() for fp in method_kwargs[path_param]]
else:
params[path_param] = sha1(
open(method_kwargs[path_param], 'rb').read()).hexdigest()

# creates hash of all file content within tfdir
# excludes .terraform/, hidden files, tfstate files from being used for hash
params["tfdir"] = self._dirhash(
self.tfdir, sha1(), ignore_hidden=True, exclude_directories=[".terraform"], excluded_extensions=['.backup', '.tfstate']).hexdigest()

return sha1(json.dumps(params, sort_keys=True,
default=str).encode("cp037")).hexdigest() + ".pickle"

def _cache(func):

def cache(self, **kwargs):
@@ -410,39 +447,9 @@ def cache(self, **kwargs):

cache_dir = self.cache_dir / \
Path(self.tfdir.strip("/")) / Path(func.__name__)
# creates cache dir if not exists
cache_dir.mkdir(parents=True, exist_ok=True)

params = {
**{
k: v for k, v in self.__dict__.items()
# only uses instance attributes that are involved in the results of
# the decorated method
if k in ["binary", "_basedir", "tfdir", "_env"]
},
**kwargs,
}

# creates hash of file contents
for path_param in ["extra_files", "tf_var_file"]:
if path_param in kwargs:
if isinstance(kwargs[path_param], list):
params[path_param] = [
sha1(open(fp, 'rb').read()).hexdigest() for fp in kwargs[path_param]]
else:
params[path_param] = sha1(
open(kwargs[path_param], 'rb').read()).hexdigest()

# creates hash of all file content within tfdir
# excludes hidden files from being used within hash (ignores .terraform/ or .terragrunt-cache/)
# and excludes any local tfstate files

params["tfdir"] = self._dirhash(
self.tfdir, sha1(), ignore_hidden=True, excluded_extensions=['.backup', '.tfstate']).hexdigest()

hash_filename = sha1(
json.dumps(params, sort_keys=True,
default=str).encode("cp037")).hexdigest() + ".pickle"
hash_filename = self.generate_cache_hash(kwargs)
cache_key = cache_dir / hash_filename
_LOGGER.debug("Cache key: %s", cache_key)

@@ -458,6 +465,13 @@ def cache(self, **kwargs):
out = func(self, **kwargs)

if out:
# the hash value will now include any changes
# to the tfdir directory including any terragrunt
# generated files
hash_filename = self.generate_cache_hash(kwargs)
cache_key = cache_dir / hash_filename
_LOGGER.debug("Cache key: %s", cache_key)

_LOGGER.info("Writing command to cache")
try:
f = cache_key.open("wb")
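Taken together, the new flow computes the cache key twice: generate_cache_hash is called before the command runs to look up an existing result, and again after a successful run so the stored key reflects any files the command itself wrote into tfdir (for example terragrunt-generated configuration). A rough standalone sketch of the key derivation, mirroring the logic above with hypothetical inputs:

import json
from hashlib import sha1
from pathlib import Path

def dirhash(directory, h, ignore_hidden=True,
            exclude_directories=(".terraform",),
            excluded_extensions=(".backup", ".tfstate")):
    # Recursively hash file contents, skipping hidden files, excluded
    # extensions and excluded directories (mirrors _dirhash above).
    for path in sorted(Path(directory).iterdir(), key=lambda p: str(p).lower()):
        if path.is_file():
            if ignore_hidden and path.name.startswith("."):
                continue
            if path.suffix in excluded_extensions:
                continue
            h.update(path.read_bytes())
        elif path.is_dir() and path.name not in exclude_directories:
            h = dirhash(path, h, ignore_hidden, exclude_directories, excluded_extensions)
    return h

def cache_key(tfdir, attrs, kwargs):
    # Combine selected attributes and call kwargs, replace tfdir with a
    # content hash, and serialise deterministically before hashing.
    params = {**attrs, **kwargs}
    params["tfdir"] = dirhash(tfdir, sha1()).hexdigest()
    return sha1(json.dumps(params, sort_keys=True,
                           default=str).encode("cp037")).hexdigest() + ".pickle"

# Hypothetical usage: the key computed after a run differs from the one
# computed before it whenever the run created new files under tfdir.
# before = cache_key("fixtures/plan", {"binary": "terraform"}, {"output": True})
# ...run terraform/terragrunt, possibly generating files in fixtures/plan...
# after = cache_key("fixtures/plan", {"binary": "terraform"}, {"output": True})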