Running Doctests for multilabel-margin-loss successfully.
Huvinesh Rajendran committed Nov 8, 2023
1 parent 5cf0382 commit c501571
Showing 6 changed files with 47 additions and 24 deletions.
11 changes: 6 additions & 5 deletions ivy/data_classes/array/experimental/losses.py
@@ -402,7 +402,7 @@ def multilabel_margin_loss(
Returns
-------
out : array
|array|
The calculated multilabel margin loss.
Examples
@@ -413,14 +413,15 @@ def multilabel_margin_loss(
... [1, 1, 1], [1, -1, 1]], dtype=ivy.float32)
>>> loss_none = input_tensor.multilabel_margin_loss(
... target=target_tensor, reduction="none")
... target_tensor, reduction="none")
>>> print(loss_none)
ivy.array([3.49625897, 0.71111226, 0.43989015])
ivy.array([3.49625897, 0.71111232, 0.43989024])
>>> loss_mean = input_tensor.multilabel_margin_loss(
... target=target_tensor, reduction="mean")
... target_tensor, reduction="mean")
>>> print(loss_mean)
ivy.array(1.54908717)
"""
return ivy.multilabel_margin_loss(
input=self._data, target=target, axis=axis, reduction=reduction, out=out
self._data, target, axis=axis, reduction=reduction, out=out
)
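Note on the call-style change above: the backend signatures further down mark `input` and `target` as positional-only (the `/` in their parameter lists), so the doctest and the dispatch call now pass them positionally. A minimal sketch with a hypothetical stub, not the real ivy function:

```python
# Sketch (hypothetical stub, not ivy code): parameters before the "/" are
# positional-only, so keyword calls like the old doctest raise a TypeError.
def loss_stub(input, target, /, *, reduction="none"):
    return f"reduction={reduction}"

print(loss_stub([0.2, 0.4, 0.6], [0, 1, 1]))            # OK: positional arguments
try:
    loss_stub(input=[0.2, 0.4, 0.6], target=[0, 1, 1])  # rejected: positional-only
except TypeError as err:
    print(type(err).__name__, err)
```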
4 changes: 2 additions & 2 deletions ivy/data_classes/container/experimental/losses.py
@@ -1159,7 +1159,7 @@ def _static_multilabel_margin_loss(
... input_tensor, target_tensor)
>>> print(loss)
{
a: ivy.array([3.49625897, 0.71111226, 0.43989015])
a: ivy.array([3.49625897, 0.71111232, 0.43989024])
}
With a mix of :class:`ivy.Array` and :class:`ivy.Container` inputs:
@@ -1245,7 +1245,7 @@ def multilabel_margin_loss(
>>> loss = input_tensor.multilabel_margin_loss(target_tensor)
>>> print(loss)
{
a: ivy.array([3.49625897, 0.71111226, 0.43989015])
a: ivy.array([3.49625897, 0.71111232, 0.43989024])
}
With a mix of :class:`ivy.Array` and :class:`ivy.Container` inputs:
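For context on the container docstrings above, a minimal sketch of the mapping behaviour (illustrative inputs, not the hidden doctest tensors; the printed value assumes the logaddexp formulation added elsewhere in this commit and the default "none" reduction seen in the doctest):

```python
import ivy

# Sketch: the container method applies the loss to each leaf array, so the
# result keeps the container keys.
ivy.set_backend("numpy")
logits = ivy.Container(a=ivy.array([[0.2, 0.4, 0.6]]))
labels = ivy.Container(a=ivy.array([[0.0, 1.0, 1.0]]))
loss = logits.multilabel_margin_loss(labels)
print(loss)  # roughly {a: ivy.array([0.58288069])}, one value per sample
```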
21 changes: 13 additions & 8 deletions ivy/functional/backends/numpy/experimental/losses.py
@@ -181,16 +181,21 @@ def poisson_nll_loss(
)
@_scalar_output_to_0d_array
def multilabel_margin_loss(
input: np.ndarray, target: np.ndarray, /, *, reduction: str = "none"
input: np.ndarray,
target: np.ndarray,
/,
*,
reduction: str = "none",
out: Optional[np.ndarray] = None,
) -> np.ndarray:
input_arr = np.asanyarray(input)
target_arr = np.asanyarray(target, dtype=input.dtype)

loss = np.maximum.reduce(
np.maximum(0, 1 - np.take_along_axis(input_arr, target_arr, axis=0))
) / np.prod(input_arr.shape)

target_arr = np.asanyarray(target)
loss = -(
target_arr * (-np.logaddexp(0, -input_arr))
+ (1 - target_arr) * (-np.logaddexp(0, input_arr))
)
loss = np.mean(loss, axis=-1)
if reduction not in ["sum", "mean", "none"]:
raise ValueError("Invalid reduction value. Expected 'sum', 'mean', or 'none'.")

return _apply_loss_reduction(loss, reduction=reduction)
return _apply_loss_reduction(loss, reduction=reduction, out=out)
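As a sanity check on the rewritten NumPy body above, a small sketch (not part of the commit) confirming that the logaddexp form equals the explicit sigmoid / binary-cross-entropy expression it is meant to compute:

```python
import numpy as np

# -logaddexp(0, -x) == log(sigmoid(x)) and -logaddexp(0, x) == log(1 - sigmoid(x)),
# so the backend's loss is the per-class mean of a multi-hot sigmoid cross-entropy.
rng = np.random.default_rng(0)
x = rng.normal(size=(4, 3)).astype(np.float32)            # logits
y = rng.integers(0, 2, size=(4, 3)).astype(np.float32)    # multi-hot targets

lhs = np.mean(-(y * (-np.logaddexp(0, -x)) + (1 - y) * (-np.logaddexp(0, x))), axis=-1)

sig = 1.0 / (1.0 + np.exp(-x))
rhs = np.mean(-(y * np.log(sig) + (1 - y) * np.log(1.0 - sig)), axis=-1)

assert np.allclose(lhs, rhs, atol=1e-6)
print(lhs)  # per-sample losses, i.e. the "none" reduction
```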
12 changes: 11 additions & 1 deletion ivy/functional/backends/paddle/experimental/losses.py
@@ -251,7 +251,12 @@ def poisson_nll_loss(
backend_version,
)
def multilabel_margin_loss(
input: paddle.Tensor, target: paddle.Tensor, /, *, reduction: str = "none"
input: paddle.Tensor,
target: paddle.Tensor,
/,
*,
reduction: str = "none",
out: Optional[paddle.Tensor] = None,
) -> paddle.Tensor:
"""
Compute the multilabel margin loss.
@@ -278,6 +283,11 @@ def multilabel_margin_loss(
raise ValueError(
"Invalid reduction value. Allowed values are 'none', 'mean', 'sum'."
)
if out is not None:
raise ValueError(
"Invalid out value. out is not supported in paddle, hence only None is"
" allowed."
)

loss = F.multi_label_soft_margin_loss(
input=input, label=target, reduction=reduction
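A hedged usage sketch of the Paddle call the backend above delegates to, reusing the keyword names from the diff (illustrative values; the exact output depends on Paddle's implementation):

```python
import paddle
import paddle.nn.functional as F

# Sketch: paddle's multi_label_soft_margin_loss with the same keywords the
# backend passes; with reduction="none" it returns one loss per sample.
logits = paddle.to_tensor([[0.2, 0.4, 0.6]], dtype="float32")
labels = paddle.to_tensor([[0.0, 1.0, 1.0]], dtype="float32")
loss = F.multi_label_soft_margin_loss(input=logits, label=labels, reduction="none")
print(loss)  # expected to be close to [0.58288069], as in the NumPy check above
```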
4 changes: 2 additions & 2 deletions ivy/functional/ivy/experimental/losses.py
@@ -620,7 +620,7 @@ def multilabel_margin_loss(
... [1, 1, 1], [1, -1, 1]], dtype=ivy.float32)
>>> multilabel_margin_loss(input_tensor, target_tensor)
ivy.array([3.49625897, 0.71111226, 0.43989015])
ivy.array([3.49625897, 0.71111232, 0.43989024])
>>> multilabel_margin_loss(input_tensor, target_tensor, reduction="mean")
ivy.array(1.54908717)
@@ -630,5 +630,5 @@
"Invalid value for 'reduction'. Expected 'none', 'mean', or 'sum'."
)
return ivy.current_backend(input_data).multilabel_margin_loss(
input_data=input_data, target=target, reduction=reduction, out=out
input_data, target, reduction=reduction, out=out
)
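For reference, a sketch (not text quoted from the docstring) of the loss the rewritten backends compute, which the updated expected values reflect; sigma is the logistic sigmoid and C the number of classes:

```latex
% Per-sample loss; the "mean"/"sum" reductions then aggregate over samples.
\mathcal{L}(x, y) = \frac{1}{C} \sum_{i=1}^{C}
  -\Bigl[\, y_i \log \sigma(x_i) + (1 - y_i)\,\log\bigl(1 - \sigma(x_i)\bigr) \Bigr],
\qquad
\log \sigma(x_i) = -\operatorname{logaddexp}(0, -x_i),
\quad
\log\bigl(1 - \sigma(x_i)\bigr) = -\operatorname{logaddexp}(0, x_i).
```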
19 changes: 13 additions & 6 deletions ivy/functional/ivy/losses.py
@@ -422,12 +422,19 @@ def multilabel_margin_loss(
--------
>>> input_tensor = ivy.array([0.2, 0.4, 0.6])
>>> target_tensor = ivy.array([0, 1, 1])
>>> loss = multilabel_margin_loss(input_data, target)
>>> print(loss)
ivy.array(0.4)
>>> loss_none = multilabel_margin_loss(input_tensor, target_tensor)
>>> print(loss_none)
ivy.array(0.58288069)
"""
ivy.utils.assertions.check_elem_in_list(reduction, ["none", "sum", "mean"])
loss = (
ivy.sum(ivy.maximum(0, 1 - (input_data[target] - target))) / input_data.shape[0]
loss = -(
target * (-ivy.logaddexp(0, -input_data))
+ (1 - target) * (-ivy.logaddexp(0, input_data))
)
return _reduce_loss(reduction=reduction, loss=loss, axis=axis, out=out)
loss = ivy.mean(loss, axis=-1)
if reduction == "none":
return loss
if reduction == "mean":
return ivy.mean(loss, axis=axis, out=out)
if reduction == "sum":
return ivy.sum(loss, axis=axis, out=out)
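A quick check of the new doctest value above, written directly against the logaddexp formulation in the rewritten body (a sketch assuming a NumPy-backed run; not part of the commit):

```python
import ivy

# Reproduce the updated doctest: logits [0.2, 0.4, 0.6], multi-hot target
# [0, 1, 1], "none" reduction (per-sample mean over classes).
ivy.set_backend("numpy")
x = ivy.array([0.2, 0.4, 0.6])
y = ivy.array([0.0, 1.0, 1.0])
loss = -(y * (-ivy.logaddexp(0, -x)) + (1 - y) * (-ivy.logaddexp(0, x)))
print(ivy.mean(loss, axis=-1))  # ~ ivy.array(0.58288069), the updated doctest value
```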
