diff --git a/ivy/data_classes/array/experimental/losses.py b/ivy/data_classes/array/experimental/losses.py index 4d69d413b2313..794b7ed76179d 100644 --- a/ivy/data_classes/array/experimental/losses.py +++ b/ivy/data_classes/array/experimental/losses.py @@ -1,6 +1,50 @@ # global import abc +from typing import Optional, Union + +# local +import ivy class _ArrayWithLossesExperimental(abc.ABC): - pass + def l1_loss( + self: Union[ivy.Array, ivy.NativeArray], + target: Union[ivy.Array, ivy.NativeArray], + /, + *, + reduction: Optional[str] = "mean", + out: Optional[ivy.Array] = None, + ) -> ivy.Array: + """ + ivy.Array instance method variant of ivy.l1_loss. This method simply wraps the + function, and so the docstring for ivy.l1_loss also applies to this method with + minimal changes. + + Parameters + ---------- + self + input array. + target + input array containing the targeted values. + reduction + ``'mean'``: The output will be averaged. + ``'sum'``: The output will be summed. + ``'none'``: No reduction will be applied to the output. Default: ``'mean'``. + out + optional output array, for writing the result to. It must have a shape that + the inputs broadcast to. + + Returns + ------- + ret + The L1 loss between the input array and the targeted values.
+ + Examples + -------- + >>> x = ivy.array([1.0, 2.0, 3.0]) + >>> y = ivy.array([0.7, 1.8, 2.9]) + >>> z = x.l1_loss(y) + >>> print(z) + ivy.array(0.20000000000000004) + """ + return ivy.l1_loss(self._data, target, reduction=reduction, out=out) diff --git a/ivy/data_classes/container/container.py b/ivy/data_classes/container/container.py index cc82cc3af2cb0..2c16ee729687a 100644 --- a/ivy/data_classes/container/container.py +++ b/ivy/data_classes/container/container.py @@ -44,6 +44,7 @@ _ContainerWithSortingExperimental, _ContainerWithStatisticalExperimental, _ContainerWithUtilityExperimental, + _ContainerWithLossesExperimental, ) @@ -87,6 +88,7 @@ class Container( _ContainerWithSortingExperimental, _ContainerWithStatisticalExperimental, _ContainerWithUtilityExperimental, + _ContainerWithLossesExperimental, ): def __init__( self, diff --git a/ivy/data_classes/container/experimental/__init__.py b/ivy/data_classes/container/experimental/__init__.py index c56104d2fd84f..9723f971c453e 100644 --- a/ivy/data_classes/container/experimental/__init__.py +++ b/ivy/data_classes/container/experimental/__init__.py @@ -17,3 +17,4 @@ from .sorting import _ContainerWithSortingExperimental from .statistical import _ContainerWithStatisticalExperimental from .utility import _ContainerWithUtilityExperimental +from .losses import _ContainerWithLossesExperimental diff --git a/ivy/data_classes/container/experimental/losses.py b/ivy/data_classes/container/experimental/losses.py index e69de29bb2d1d..1e3305c80b1e4 100644 --- a/ivy/data_classes/container/experimental/losses.py +++ b/ivy/data_classes/container/experimental/losses.py @@ -0,0 +1,161 @@ +# global +from typing import Optional, Union, List, Dict + +# local +import ivy +from ivy.data_classes.container.base import ContainerBase + + +class _ContainerWithLossesExperimental(ContainerBase): + @staticmethod + def _static_l1_loss( + input: Union[ivy.Container, ivy.Array, ivy.NativeArray], + target: Union[ivy.Container, ivy.Array, 
ivy.NativeArray], + /, + *, + reduction: Optional[Union[str, ivy.Container]] = "mean", + key_chains: Optional[Union[List[str], Dict[str, str], ivy.Container]] = None, + to_apply: Union[bool, ivy.Container] = True, + prune_unapplied: Union[bool, ivy.Container] = False, + map_sequences: Union[bool, ivy.Container] = False, + out: Optional[ivy.Container] = None, + ) -> ivy.Container: + """ + ivy.Container static method variant of ivy.l1_loss. This method simply wraps the + function, and so the docstring for ivy.l1_loss also applies to this method with + minimal changes. + + Parameters + ---------- + input + input array or container. + target + input array or container containing the targeted values. + reduction + ``'mean'``: The output will be averaged. + ``'sum'``: The output will be summed. + ``'none'``: No reduction will be applied to the output. Default: ``'mean'``. + key_chains + The key-chains to apply or not apply the method to. Default is ``None``. + to_apply + If True, the method will be applied to key_chains, otherwise key_chains + will be skipped. Default is ``True``. + prune_unapplied + Whether to prune key_chains for which the function was not applied. + Default is ``False``. + map_sequences + Whether to also map method to sequences (lists, tuples). + Default is ``False``. + out + optional output container, for writing the result to. It must have a shape + that the inputs broadcast to. + + Returns + ------- + ret + The L1 loss between the input array and the targeted values. + + Examples + -------- + With :class:`ivy.Container` inputs: + + >>> x = ivy.Container(a=ivy.array([1, 2, 3]), b=ivy.array([4, 5, 6])) + >>> y = ivy.Container(a=ivy.array([2, 3, 4]), b=ivy.array([4, 5, 6])) + >>> z = ivy.Container._static_l1_loss(x, y) + >>> print(z) + { + a: ivy.array(1.), + b: ivy.array(0.)
+ } + + With a mix of :class:`ivy.Array` and :class:`ivy.Container` inputs: + + >>> x = ivy.array([1, 2, 3]) + >>> y = ivy.Container(a=ivy.array([2, 3, 4]), b=ivy.array([5, 6, 7])) + >>> z = ivy.Container._static_l1_loss(x, y) + >>> print(z) + { + a: ivy.array(1.), + b: ivy.array(4.) + } + """ + return ContainerBase.cont_multi_map_in_function( + "l1_loss", + input, + target, + reduction=reduction, + key_chains=key_chains, + to_apply=to_apply, + prune_unapplied=prune_unapplied, + map_sequences=map_sequences, + out=out, + ) + + def l1_loss( + self: ivy.Container, + target: Union[ivy.Container, ivy.Array, ivy.NativeArray], + /, + *, + reduction: Optional[Union[str, ivy.Container]] = "mean", + key_chains: Optional[Union[List[str], Dict[str, str], ivy.Container]] = None, + to_apply: Union[bool, ivy.Container] = True, + prune_unapplied: Union[bool, ivy.Container] = False, + map_sequences: Union[bool, ivy.Container] = False, + out: Optional[ivy.Container] = None, + ) -> ivy.Container: + """ + ivy.Container instance method variant of ivy.l1_loss. This method simply wraps + the function, and so the docstring for ivy.l1_loss also applies to this method + with minimal changes. + + Parameters + ---------- + self + input container. + target + input array or container containing the targeted values. + reduction + ``'mean'``: The output will be averaged. + ``'sum'``: The output will be summed. + ``'none'``: No reduction will be applied to the output. Default: ``'mean'``. + key_chains + The key-chains to apply or not apply the method to. Default is ``None``. + to_apply + If True, the method will be applied to key_chains, otherwise key_chains + will be skipped. Default is ``True``. + prune_unapplied + Whether to prune key_chains for which the function was not applied. + Default is ``False``. + map_sequences + Whether to also map method to sequences (lists, tuples). + Default is ``False``. + out + optional output container, for writing the result to.
It must have a shape + that the inputs broadcast to. + + Returns + ------- + ret + The L1 loss between the input array and the targeted values. + + Examples + -------- + >>> x = ivy.Container(a=ivy.array([1, 2, 3]), b=ivy.array([4, 5, 6])) + >>> y = ivy.Container(a=ivy.array([2, 3, 4]), b=ivy.array([4, 5, 6])) + >>> z = x.l1_loss(y) + >>> print(z) + { + a: ivy.array(1.), + b: ivy.array(0.) + } + """ + return self._static_l1_loss( + self, + target, + reduction=reduction, + key_chains=key_chains, + to_apply=to_apply, + prune_unapplied=prune_unapplied, + map_sequences=map_sequences, + out=out, + ) diff --git a/ivy/functional/backends/paddle/experimental/losses.py b/ivy/functional/backends/paddle/experimental/losses.py new file mode 100644 index 0000000000000..8c83a1680bb8e --- /dev/null +++ b/ivy/functional/backends/paddle/experimental/losses.py @@ -0,0 +1,36 @@ +# global +from typing import Optional +import paddle +import paddle.nn.functional as F + +# local +from ivy.func_wrapper import with_unsupported_device_and_dtypes +from . import backend_version + + +@with_unsupported_device_and_dtypes( + { + "2.5.1 and below": { + "cpu": ( + "float16", + "int8", + "int16", + "int32", + "int64", + "uint8", + "complex64", + "complex128", + "bool", + ) + } + }, + backend_version, +) +def l1_loss( + input: paddle.Tensor, + target: paddle.Tensor, + /, + *, + reduction: Optional[str] = "mean", +) -> paddle.Tensor: + return F.l1_loss(input, target, reduction=reduction) diff --git a/ivy/functional/backends/torch/experimental/losses.py b/ivy/functional/backends/torch/experimental/losses.py new file mode 100644 index 0000000000000..2b88adebd3c6c --- /dev/null +++ b/ivy/functional/backends/torch/experimental/losses.py @@ -0,0 +1,24 @@ +from typing import Optional +import torch +from ivy.func_wrapper import with_unsupported_dtypes +from .
import backend_version + +# Assuming ivy and backend_version are imported and defined properly + + +@with_unsupported_dtypes( + {"2.0.1 and below": ("uint8", "int8", "int16", "int32", "int64", "bool")}, + backend_version, +) +def l1_loss( + input: torch.Tensor, + target: torch.Tensor, + /, + *, + reduction: Optional[str] = "mean", +) -> torch.Tensor: + return torch.nn.functional.l1_loss( + input, + target, + reduction=reduction, + ) diff --git a/ivy/functional/ivy/experimental/losses.py b/ivy/functional/ivy/experimental/losses.py index 237c1c57b0cec..592d17eda5367 100644 --- a/ivy/functional/ivy/experimental/losses.py +++ b/ivy/functional/ivy/experimental/losses.py @@ -6,6 +6,7 @@ from ivy.func_wrapper import ( handle_nestable, inputs_to_ivy_arrays, + handle_array_like_without_promotion, handle_array_function, ) from ivy.utils.exceptions import handle_exceptions @@ -93,3 +94,61 @@ def log_poisson_loss( return ivy.mean(loss, axis=axis, out=out) else: return ivy.inplace_update(out, loss) if out is not None else loss + + +@handle_exceptions +@handle_nestable +@handle_array_like_without_promotion +@inputs_to_ivy_arrays +@handle_array_function +def l1_loss( + input: Union[ivy.Array, ivy.NativeArray], + target: Union[ivy.Array, ivy.NativeArray], + /, + *, + reduction: Optional[str] = "mean", + out: Optional[ivy.Array] = None, +) -> ivy.Array: + """ + Compute L1 loss (Mean Absolute Error - MAE) between input and target values. + + Parameters + ---------- + input : Union[ivy.Array, ivy.NativeArray] + Input array containing input values. + target : Union[ivy.Array, ivy.NativeArray] + Input array containing targeted values. + reduction : str, optional + Reduction method for the output loss. Options: + "none" (no reduction), "mean" (mean of losses), + "sum" (sum of losses). Default: "mean". + out : Optional[ivy.Array], optional + Optional output array for writing the result to. + It must have a shape that the inputs broadcast to.
+ + + Returns + ------- + ivy.Array + The L1 loss (MAE) between the given input and targeted values. + + + Examples + -------- + >>> x = ivy.array([1.0, 2.0, 3.0]) + >>> y = ivy.array([0.5, 2.5, 2.2]) + >>> print(ivy.l1_loss(x, y)) + ivy.array(0.6) + >>> a = ivy.array([[1.0, 2.0], [3.0, 4.0]]) + >>> b = ivy.array([[0.5, 1.5], [2.5, 3.5]]) + >>> print(ivy.l1_loss(a, b)) + ivy.array(0.5) + """ + loss = ivy.abs(target - input) + + if reduction == "sum": + return ivy.sum(loss, out=out) + elif reduction == "mean": + return ivy.mean(loss, out=out) + else: + return ivy.inplace_update(out, loss) if out is not None else loss diff --git a/ivy_tests/test_ivy/test_functional/test_experimental/test_nn/test_losses.py b/ivy_tests/test_ivy/test_functional/test_experimental/test_nn/test_losses.py index e45c0028468c2..b81da2ac3535a 100644 --- a/ivy_tests/test_ivy/test_functional/test_experimental/test_nn/test_losses.py +++ b/ivy_tests/test_ivy/test_functional/test_experimental/test_nn/test_losses.py @@ -57,3 +57,45 @@ def test_log_poisson_loss( compute_full_loss=compute_full_loss, atol_=1e-2, ) + + +@handle_test( + fn_tree="functional.ivy.experimental.l1_loss", + dtype_input=helpers.dtype_and_values( + available_dtypes=helpers.get_dtypes("float"), + min_value=1, + max_value=100, + allow_inf=False, + ), + dtype_target=helpers.dtype_and_values( + available_dtypes=helpers.get_dtypes("float"), + min_value=1, + max_value=100, + allow_inf=False, + ), + reduction=st.sampled_from(["sum", "mean", "none"]), +) +def test_l1_loss( + *, + dtype_input, + dtype_target, + reduction, + test_flags, + backend_fw, + fn_name, + on_device, +): + dtype_input, input = dtype_input + dtype_target, target = dtype_target + + helpers.test_function( + input_dtypes=dtype_input + dtype_target, + test_flags=test_flags, + backend_to_test=backend_fw, + fn_name=fn_name, + on_device=on_device, + atol_=1e-02, + input=input[0], + target=target[0], + reduction=reduction, + )