Skip to content

Commit

Permalink
Small tidy and clarify.
Browse files Browse the repository at this point in the history
  • Loading branch information
pp-mo committed Aug 23, 2023
1 parent 8cfcc06 commit 76a3974
Show file tree
Hide file tree
Showing 2 changed files with 31 additions and 27 deletions.
35 changes: 19 additions & 16 deletions lib/iris/common/metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -141,15 +141,19 @@ def __new__(mcs, name, bases, namespace):


#
# "Extended" dictionary access, for dict-like operations which work with regular dicts,
# but in specific extended ways for 'CubeAttrsDict' style split dictionaries
# Dictionary operations for dealing with the CubeAttrsDict "split"-style attribute
# dictionaries.
#
# The idea here is to convert a split-dictionary into a "plain" one for calculations,
# whose keys are all pairs of the form ('global', <keyname>) or ('local', <keyname>).
# whose keys are all pairs of the form ('global', <keyname>) or ('local', <keyname>).
# And to convert back again after the operation, if the result is a dictionary.
# For "strict" operations this probably does all that is needed. For lenient ones,
# it's not so clear whether local+global keys with the same attribute name "should",
# in some cases, affect one another in some ways
#
# For "strict" operations this clearly does all that is needed. For lenient ones,
# we _might_ want for local+global attributes of the same name to interact.
# However, on careful consideration, it seems that this is not actually desirable for
# any of the common-metadata operations.
# So, we simply treat "global" and "local" attributes of the same name as entirely
# independent. Which happily is also the easiest to code, and to explain.
#
def xd_is_split(dic):
"""Detect whether a dictionary is a "split-attribute" type."""
Expand All @@ -167,16 +171,14 @@ def xd_to_normal(dic):
"""
Convert the input to a 'normal' dict with paired keys, if it is split-attrs type
"""
if xd_is_split(dic):
result = dict(_global_local_items(dic))
else:
result = dic
return result
return dict(_global_local_items(dic))


def xd_from_normal(dic):
"""
Convert an input with global//local paired keys back into a split-attrs dict.
Convert an input with global/local paired keys back into a split-attrs dict.
For now, this is always (and only) a CubeAttrsDict.
"""
from iris.cube import CubeAttrsDict

Expand Down Expand Up @@ -441,9 +443,10 @@ def func(field):
@staticmethod
def _combine_lenient_attributes(left, right):
"""Leniently combine the dictionary members together."""
# Copy the dictionaries, convert from split form if required
# Copy the dictionaries.
left = deepcopy(left)
right = deepcopy(right)
# convert from split form if required
is_split, left, right = xd_normalise_input_pair(left, right)
# Use xxhash to perform an extremely fast non-cryptographic hash of
# each dictionary key rvalue, thus ensuring that the dictionary is
Expand Down Expand Up @@ -472,9 +475,10 @@ def _combine_lenient_attributes(left, right):
@staticmethod
def _combine_strict_attributes(left, right):
"""Perform strict combination of the dictionary members."""
# Copy the dictionaries, convert from split form if required
# Copy the dictionaries.
left = deepcopy(left)
right = deepcopy(right)
# convert from split form if required
is_split, left, right = xd_normalise_input_pair(left, right)
# Use xxhash to perform an extremely fast non-cryptographic hash of
# each dictionary key rvalue, thus ensuring that the dictionary is
Expand Down Expand Up @@ -542,8 +546,6 @@ def _compare_lenient_attributes(left, right):

# Convert from split if required --> i.e. all distinct keys (global+local)
_, left, right = xd_normalise_input_pair(left, right)
# TODO: ?maybe? global + local versions of an attr SHOULD conflict
# -- this way treats them as entirely separate entries, for now.

sleft = {(k, hexdigest(v)) for k, v in left.items()}
sright = {(k, hexdigest(v)) for k, v in right.items()}
Expand All @@ -553,6 +555,7 @@ def _compare_lenient_attributes(left, right):
dsright = dict(sright - sleft)
# Intersection of common item keys with different values.
keys = set(dsleft.keys()) & set(dsright.keys())

return not bool(keys)

@staticmethod
Expand Down
23 changes: 12 additions & 11 deletions lib/iris/tests/unit/common/metadata/test_CubeMetadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -127,37 +127,38 @@ def attrs_check(
check_testcase: str, check_lenient: bool, op: str, cases: dict
):
"""
Check the attributes handling of a metadata operation.
Check the split-attributes handling of a metadata operation.
Testcases are the ones already listed in _ATTRS_TESTCASE_INPUTS, where they are
coded strings, as used in iris.tests.integration.test_netcdf_loadsaveattrs.
* construct the 2 inputs from _ATTRS_TESTCASE_INPUTS[check_testcase],
* then perform
result = op(*inputs, lenient=check_lenient).
* convert the result to a result-code string, again like test_netcdf_loadsaveattrs.
* (except for equality) convert the result to a "result-code string",
again like in test_netcdf_loadsaveattrs.
* assert that the (encoded) results match the expected
The 'cases' arg specifies the "expected" result-code answers for each testcase :
either two result-codes for 'strict' and 'lenient' cases, when those are different,
or a single result-code if strict and lenient results are the same.
either two results for 'strict' and 'lenient' cases, when those are different,
or a single result if strict and lenient results are the same.
"""
# cases.keys() are the testcase names -- should match the master table
assert cases.keys() == _ATTRS_TESTCASE_INPUTS.keys()
# Each case is recorded as testcase: (<input>, [*output-codes])
# The 'input' is just for readability: it should match that in the master table.
# The "input"s are only for readability, and should match those in the master table.
assert all(
cases[key][0] == _ATTRS_TESTCASE_INPUTS[key]
for key in _ATTRS_TESTCASE_INPUTS
)
# Perform the configured check, and check that the results are as expected.
testcase = cases[check_testcase]
input_spec, result_specs = testcase
input_spec, result_specs = cases[check_testcase]
input_spec = input_spec.split(
":"
) # make a list from the two sides of the ":"
assert len(input_spec) == 2

# convert to a list of (global, *locals) value sets
input_values = decode_matrix_input(input_spec)

Expand Down Expand Up @@ -195,7 +196,7 @@ def attrsdict(value):
result = getattr(input_l, op)(input_r, lenient=check_lenient)

# Convert the result to the form of the recorded "expected" output.
# This depends on the test operation...
# The expected-result format depends on the operation under test.
assert op in ("combine", "equal", "difference")
if op == "combine":
# "combine" result is CubeMetadata
Expand All @@ -204,9 +205,8 @@ def attrsdict(value):
result.attributes.globals.get("_testattr_", None),
result.attributes.locals.get("_testattr_", None),
]
(result,) = encode_matrix_result(
values
) # NB always a list of 1 spec (string)
# N.B. encode_matrix_result returns a list of results (always 1 in this case).
(result,) = encode_matrix_result(values)

elif op == "difference":
# "difference" op result is a CubeMetadata, its values are difference-pairs.
Expand Down Expand Up @@ -256,6 +256,7 @@ def valrep_pair(val):
# (value-pairs) == [[None, "a"], [None, None]]
# --> (value-codes) ["-a", "--"]
# --> (result) "G-aL--"
# N.B. encode_matrix_result returns a list of results (1 in this case).
(result,) = encode_matrix_result(global_local_valuecodes)

else:
Expand Down

0 comments on commit 76a3974

Please sign in to comment.