Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

don't return neutral value, refs #19 #21

Merged
merged 1 commit into from
Nov 5, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 7 additions & 16 deletions src/distance_explainer.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,10 +3,10 @@
import logging
import dianna.utils
import numpy as np
import numpy.typing
from dianna.utils.maskers import generate_interpolated_float_masks_for_image
from sklearn.metrics import pairwise_distances
from tqdm import tqdm
import numpy.typing

logging.getLogger(__name__).addHandler(logging.NullHandler())

Expand Down Expand Up @@ -51,7 +51,8 @@ def __init__(self, n_masks=1000, feature_res=8, p_keep=.5, # pylint: disable=to
self.mask_selection_negative_range_min = mask_selection_negative_range_min
self.batch_size = batch_size

def explain_image_distance(self, model_or_function, input_data, embedded_reference, masks=None) -> tuple[numpy.typing.NDArray, float]:
def explain_image_distance(self, model_or_function, input_data, embedded_reference, masks=None) -> tuple[
numpy.typing.NDArray, float]:
"""Explain an image with respect to a reference point in an embedded space.

Args:
Expand All @@ -61,8 +62,7 @@ def explain_image_distance(self, model_or_function, input_data, embedded_referen
masks: User specified masks, in case no autogenerated masks should be used.

Returns:
saliency map and the neutral value within the saliency map which indicates the parts of the image that
neither bring the image closer nor further away from the embedded reference.
saliency map
"""
full_preprocess_function, input_data = self._prepare_input_data(input_data)
runner = dianna.utils.get_function(model_or_function, preprocess_function=full_preprocess_function)
Expand All @@ -72,7 +72,8 @@ def explain_image_distance(self, model_or_function, input_data, embedded_referen
img_shape = input_data.shape[1:3]
# Expose masks to make user inspection possible
if masks is None:
self.masks = generate_interpolated_float_masks_for_image(img_shape, active_p_keep, self.n_masks, self.feature_res)
self.masks = generate_interpolated_float_masks_for_image(img_shape, active_p_keep, self.n_masks,
self.feature_res)
else:
self.masks = masks
if self.masks.shape[0] != self.n_masks:
Expand Down Expand Up @@ -124,17 +125,7 @@ def describe(x, name):

saliency = unnormalized_sal

neutral_value = active_p_keep

# for one-sided experiments, use "meaningful" neutral value (the unperturbed distance), otherwise center on 0
if len(lowest_mask_weights) > 0 and len(highest_mask_weights) == 0:
neutral_value = neutral_value
if len(highest_mask_weights) > 0 and len(lowest_mask_weights) == 0:
neutral_value = -neutral_value
if len(highest_mask_weights) > 0 and len(lowest_mask_weights) > 0:
neutral_value = 0

return saliency, neutral_value
return saliency

@staticmethod
def _get_lowest_distance_masks_and_weights(embedded_reference, predictions, masks, mask_selection_range_min,
Expand Down
37 changes: 2 additions & 35 deletions tests/test_distance.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,25 +54,12 @@ def test_distance_explainer_saliency(dummy_data: tuple[ArrayLike, ArrayLike],
explainer = get_explainer(get_default_config())
expected_saliency, expected_value = np.load('./tests/test_data/test_dummy_data_exact_expected_output.npz').values()

saliency, value = explainer.explain_image_distance(dummy_model, input_arr, embedded_reference)
saliency = explainer.explain_image_distance(dummy_model, input_arr, embedded_reference)

assert saliency.shape == (1,) + input_arr.shape[:2] + (1,) # Has correct shape
assert np.allclose(expected_saliency, saliency) # Has correct saliency


@pytest.mark.skip("See 'neutral value not correct #19', https://github.com/dianna-ai/distance_explainer/issues/19")
def test_distance_explainer_value(dummy_data: tuple[ArrayLike, ArrayLike],
                                  dummy_model: Callable):
    """Neutral value returned by the explainer should match the recorded value."""
    embedded_reference, input_arr = dummy_data
    explainer = get_explainer(get_default_config())
    # Recorded reference output; only the neutral value is checked here.
    recorded = np.load('./tests/test_data/test_dummy_data_exact_expected_output.npz')
    expected_saliency, expected_value = recorded.values()

    saliency, value = explainer.explain_image_distance(dummy_model, input_arr, embedded_reference)

    assert np.allclose(expected_value, value)  # Has correct value


@pytest.mark.parametrize("empty_side,expected_tag",
[({"mask_selection_range_max": 0.}, "pos_empty"),
({"mask_selection_negative_range_min": 1.}, "neg_empty")])
Expand All @@ -87,26 +74,6 @@ def test_distance_explainer_one_sided_saliency(dummy_data: tuple[ArrayLike, Arra
config = dataclasses.replace(get_default_config(), **empty_side)
explainer = get_explainer(config)

saliency, value = explainer.explain_image_distance(dummy_model, input_arr, embedded_reference)
saliency = explainer.explain_image_distance(dummy_model, input_arr, embedded_reference)
assert saliency.shape == (1,) + input_arr.shape[:2] + (1,) # Has correct shape
assert np.allclose(expected_saliency, saliency) # Has correct saliency


@pytest.mark.skip("See 'neutral value not correct #19', https://github.com/dianna-ai/distance_explainer/issues/19")
@pytest.mark.parametrize("empty_side,expected_tag",
                         [({"mask_selection_range_max": 0.}, "pos_empty"),
                          ({"mask_selection_negative_range_min": 1.}, "neg_empty")])
def test_distance_explainer_one_sided_value(dummy_data: tuple[ArrayLike, ArrayLike],
                                            dummy_model: Callable,
                                            empty_side: dict[str, float],
                                            expected_tag: str):
    """Neutral value for one-sided configs should match the recorded value."""
    embedded_reference, input_arr = dummy_data
    # Recorded reference output for this one-sided configuration.
    recorded = np.load(
        f'./tests/test_data/test_dummy_data_exact_expected_output_{expected_tag}.npz')
    expected_saliency, expected_value = recorded.values()
    # Disable one selection side via the config override under test.
    explainer = get_explainer(dataclasses.replace(get_default_config(), **empty_side))

    saliency, value = explainer.explain_image_distance(dummy_model, input_arr, embedded_reference)

    assert np.allclose(expected_value, value)  # Has correct value
Loading