Fix typo and type hints
jklaise committed Dec 2, 2020
1 parent 35e7a4d commit e8cfb6b
Showing 1 changed file with 4 additions and 5 deletions: alibi/explainers/integrated_gradients.py
@@ -82,7 +82,6 @@ def _sum_rows(inp):
     end_out = _run_forward(model, end_point, target)

     if (len(model.output_shape) == 1 or model.output_shape[1] == 1) and target is not None:
-
         target_tensor = tf.cast(target, dtype=start_out.dtype)
         target_tensor = tf.reshape(1 - target_tensor, [len(target), 1])
         sign = 2 * target_tensor - 1
@@ -199,6 +198,7 @@ def _gradients_layer(model: Union[tf.keras.models.Model, 'keras.models.Model'],
         Gradients for each element of layer.
     """
+
     def watch_layer(layer, tape):
         """
         Make an intermediate hidden `layer` watchable by the `tape`.
@@ -376,11 +376,11 @@ class IntegratedGradients(Explainer):

     def __init__(self,
                  model: Union[tf.keras.Model, 'keras.Model'],
-                 layer: Union[None, tf.keras.layers.Layer, 'keras.layers.Layer',
+                 layer: Union[tf.keras.layers.Layer, 'keras.layers.Layer',
                               List[tf.keras.layers.Layer], List['keras.layers.Layer']] = None,
                  method: str = "gausslegendre",
                  n_steps: int = 50,
-                 internal_batch_size: Union[None, int] = 100
+                 internal_batch_size: int = 100
                  ) -> None:
         """
         An implementation of the integrated gradients method for Tensorflow and Keras models.
@@ -484,7 +484,7 @@ def explain(self,
                                  f"Found len(X): {len(X)}, len(baselines): {len(baselines)}")

             if max([len(x) for x in X]) != min([len(x) for x in X]):
-                raise ValueError("First dimension must be egual for all inputs")
+                raise ValueError("First dimension must be equal for all inputs")

             nb_samples = len(X[0])

@@ -558,7 +558,6 @@ def generator(target_paths=target_paths):
                     grads_b.append(grad_b)
             else:
                 grads_b = _gradients_input(self.model, paths_b, target_b)
-
             batches.append(grads_b)

         if self.layer is not None:
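The second hunk only adds a blank line before the nested watch_layer helper; its body is not shown in this diff. For readers unfamiliar with the trick its docstring refers to, the sketch below shows the usual recipe such a helper follows (an assumption based on the docstring, not code taken from this commit): wrap the layer's call method so the GradientTape records the layer's output, which then lets gradients be taken with respect to that intermediate tensor.

def watch_layer(layer, tape):
    # Wrap `layer.call` so the forward pass stores the layer output and the
    # tape starts watching it; gradients w.r.t. `layer.result` become available.
    def decorator(call):
        def wrapper(*args, **kwargs):
            layer.result = call(*args, **kwargs)  # keep a handle to the layer output
            tape.watch(layer.result)              # record it on the tape
            return layer.result                   # continue the forward pass unchanged
        return wrapper
    layer.call = decorator(layer.call)
    return layer

With a tape open, calling the model after watch_layer(model.layers[i], tape) makes tape.gradient(predictions, model.layers[i].result) well defined for that intermediate layer.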
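The remaining hunks adjust the constructor hints for layer and internal_batch_size and fix the "egual" typo; runtime behaviour is unchanged. As a point of reference, here is a minimal usage sketch of the public API those hints describe, using a made-up two-class Keras model and random data (illustration only, not part of the commit):

import numpy as np
import tensorflow as tf
from alibi.explainers import IntegratedGradients

# Hypothetical classifier and data, for illustration only.
model = tf.keras.Sequential([
    tf.keras.layers.Dense(32, activation="relu", input_shape=(10,)),
    tf.keras.layers.Dense(2, activation="softmax"),
])
X = np.random.rand(16, 10).astype(np.float32)
targets = np.random.randint(0, 2, size=16)

ig = IntegratedGradients(model,
                         layer=None,              # attribute w.r.t. the model inputs
                         method="gausslegendre",
                         n_steps=50,
                         internal_batch_size=100)
explanation = ig.explain(X, baselines=None, target=targets)
attributions = explanation.attributions  # attribution array(s) matching the input shape(s)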
