From 243a8e01bd0974c1fb0176b5ae6c7c7d5fc0f3bf Mon Sep 17 00:00:00 2001
From: Felix Hirwa Nshuti
Date: Mon, 19 Sep 2022 02:20:23 +0000
Subject: [PATCH 1/8] Added skeleton files for jax.numpy frontend

---
 ivy/functional/frontends/jax/__init__.py                   | 6 ++++++
 ivy/functional/frontends/jax/numpy/__init__.py             | 7 +++++++
 ivy/functional/frontends/jax/numpy/fft.py                  | 0
 ivy/functional/frontends/jax/numpy/linalg.py               | 0
 ivy/functional/frontends/jax/numpy/name_space_functions.py | 0
 .../test_ivy/test_frontends/test_jax/test_jax_numpy_fft.py | 0
 .../test_frontends/test_jax/test_jax_numpy_linalg.py       | 0
 .../test_jax/test_jax_numpy_namespace_functions.py         | 0
 8 files changed, 13 insertions(+)
 create mode 100644 ivy/functional/frontends/jax/numpy/__init__.py
 create mode 100644 ivy/functional/frontends/jax/numpy/fft.py
 create mode 100644 ivy/functional/frontends/jax/numpy/linalg.py
 create mode 100644 ivy/functional/frontends/jax/numpy/name_space_functions.py
 create mode 100644 ivy_tests/test_ivy/test_frontends/test_jax/test_jax_numpy_fft.py
 create mode 100644 ivy_tests/test_ivy/test_frontends/test_jax/test_jax_numpy_linalg.py
 create mode 100644 ivy_tests/test_ivy/test_frontends/test_jax/test_jax_numpy_namespace_functions.py

diff --git a/ivy/functional/frontends/jax/__init__.py b/ivy/functional/frontends/jax/__init__.py
index c74e1035cf1ab..83e642ef7318b 100644
--- a/ivy/functional/frontends/jax/__init__.py
+++ b/ivy/functional/frontends/jax/__init__.py
@@ -13,3 +13,9 @@
 from ivy.functional.frontends.jax.lax.linalg import *
 from .nn import non_linear_activations
 from ivy.functional.frontends.jax.nn.non_linear_activations import *
+from .numpy import name_space_functions
+from ivy.functional.frontends.jax.numpy.name_space_functions import *
+from .numpy import fft
+from ivy.functional.frontends.jax.numpy.fft import *
+from .numpy import linalg
+from ivy.functional.frontends.jax.numpy.linalg import *
diff --git a/ivy/functional/frontends/jax/numpy/__init__.py b/ivy/functional/frontends/jax/numpy/__init__.py
new file mode 100644
index 0000000000000..ab2c4e26dc54b
--- /dev/null
+++ b/ivy/functional/frontends/jax/numpy/__init__.py
@@ -0,0 +1,7 @@
+# flake8: noqa
+from . import name_space_functions
+from .name_space_functions import *
+from . import fft
+from .fft import *
+from . import linalg
+from .linalg import *
diff --git a/ivy/functional/frontends/jax/numpy/fft.py b/ivy/functional/frontends/jax/numpy/fft.py
new file mode 100644
index 0000000000000..e69de29bb2d1d
diff --git a/ivy/functional/frontends/jax/numpy/linalg.py b/ivy/functional/frontends/jax/numpy/linalg.py
new file mode 100644
index 0000000000000..e69de29bb2d1d
diff --git a/ivy/functional/frontends/jax/numpy/name_space_functions.py b/ivy/functional/frontends/jax/numpy/name_space_functions.py
new file mode 100644
index 0000000000000..e69de29bb2d1d
diff --git a/ivy_tests/test_ivy/test_frontends/test_jax/test_jax_numpy_fft.py b/ivy_tests/test_ivy/test_frontends/test_jax/test_jax_numpy_fft.py
new file mode 100644
index 0000000000000..e69de29bb2d1d
diff --git a/ivy_tests/test_ivy/test_frontends/test_jax/test_jax_numpy_linalg.py b/ivy_tests/test_ivy/test_frontends/test_jax/test_jax_numpy_linalg.py
new file mode 100644
index 0000000000000..e69de29bb2d1d
diff --git a/ivy_tests/test_ivy/test_frontends/test_jax/test_jax_numpy_namespace_functions.py b/ivy_tests/test_ivy/test_frontends/test_jax/test_jax_numpy_namespace_functions.py
new file mode 100644
index 0000000000000..e69de29bb2d1d

From 05ff145f0cc70e4f425752544eeaa00422d146a4 Mon Sep 17 00:00:00 2001
From: Felix Hirwa Nshuti
Date: Mon, 19 Sep 2022 06:22:02 +0000
Subject: [PATCH 2/8] Added log_softmax to torch backend

---
 ivy/functional/backends/torch/activations.py | 14 ++++++++++++++
 1 file changed, 14 insertions(+)

diff --git a/ivy/functional/backends/torch/activations.py b/ivy/functional/backends/torch/activations.py
index baf5e74a692aa..bf8b5ecd6c451 100644
--- a/ivy/functional/backends/torch/activations.py
+++ b/ivy/functional/backends/torch/activations.py
@@ -75,3 +75,17 @@ def softplus(x: torch.Tensor, /, *, out: Optional[torch.Tensor] = None) -> torch
 
 
 softplus.unsupported_dtypes = ("float16", "bfloat16")
+
+
+def log_softmax(
+    x: torch.Tensor,
+    /,
+    *,
+    axis: Optional[int] = None,
+    dtype: Optional[torch.dtype] = None,
+    out: Optional[torch.Tensor] = None,
+):
+    return torch.nn.functional.log_softmax(x, dim=axis, dtype=dtype)
+
+
+log_softmax.unsupported_dtypes = ("float16", "bfloat16")

From e553bc4f9ae86571365c1a092754de97c893964e Mon Sep 17 00:00:00 2001
From: Felix Hirwa Nshuti
Date: Thu, 22 Sep 2022 13:15:43 +0000
Subject: [PATCH 3/8] Adding log_softmax to all backends

---
 ivy/functional/backends/jax/activations.py   |  8 +++++
 ivy/functional/backends/numpy/activations.py | 22 +++++++++++++
 .../backends/tensorflow/activations.py       |  6 ++++
 ivy/functional/backends/torch/activations.py |  3 +-
 ivy/functional/ivy/activations.py            | 32 +++++++++++++++++++
 5 files changed, 69 insertions(+), 2 deletions(-)

diff --git a/ivy/functional/backends/jax/activations.py b/ivy/functional/backends/jax/activations.py
index 4f02b50c3071d..31f4f46a88060 100644
--- a/ivy/functional/backends/jax/activations.py
+++ b/ivy/functional/backends/jax/activations.py
@@ -43,3 +43,11 @@ def softmax(
 
 def softplus(x: JaxArray, /, *, out: Optional[JaxArray] = None) -> JaxArray:
     return jnp.log1p(jnp.exp(-jnp.abs(x))) + jnp.maximum(x, 0)
+
+
+def log_softmax(
+    x: JaxArray, /, *, axis: Optional[int] = None, out: Optional[JaxArray] = None
+):
+    if axis is None:
+        axis = -1
+    return jax.nn.log_softmax(x, axis)
diff --git a/ivy/functional/backends/numpy/activations.py b/ivy/functional/backends/numpy/activations.py
index e00b46846eabb..5d6eccddc3087 100644
--- a/ivy/functional/backends/numpy/activations.py
+++ b/ivy/functional/backends/numpy/activations.py
@@ -66,3 +66,25 @@ def softplus(x: np.ndarray, /, *, out: Optional[np.ndarray] = None) -> np.ndarra
 
 
 softplus.support_native_out = True
+
+
+@_handle_0_dim_output
+def log_softmax(
+    x: np.ndarray, /, *, axis: Optional[int] = None, out: Optional[np.ndarray] = None
+) -> np.ndarray:
+    x_max = np.max(x, axis=axis, keepdims=True)
+    if x_max.ndim > 0:
+        x_max[~np.isfinite(x_max)] = 0
+    elif not np.isfinite(x_max):
+        x_max = 0
+    exp_tmp = np.exp(x - x_max)
+
+    with np.errstate(divide="ignore"):
+        s = np.sum(exp_tmp, axis=axis, keepdims=True)
+        ret = np.log(s)
+
+    ret = x - x_max - ret
+    return ret
+
+
+log_softmax.support_native_out = True
diff --git a/ivy/functional/backends/tensorflow/activations.py b/ivy/functional/backends/tensorflow/activations.py
index 8b7ce33ce4095..44d3d0db5d4b2 100644
--- a/ivy/functional/backends/tensorflow/activations.py
+++ b/ivy/functional/backends/tensorflow/activations.py
@@ -42,3 +42,9 @@ def softmax(
 
 def softplus(x: Tensor, /, *, out: Optional[Tensor] = None) -> Tensor:
     return tf.nn.softplus(x)
+
+
+def log_softmax(
+    x: Tensor, /, *, axis: Optional[int] = None, out: Optional[Tensor] = None
+):
+    return tf.nn.log_softmax(x, axis)
diff --git a/ivy/functional/backends/torch/activations.py b/ivy/functional/backends/torch/activations.py
index bf8b5ecd6c451..9fad80ac528e8 100644
--- a/ivy/functional/backends/torch/activations.py
+++ b/ivy/functional/backends/torch/activations.py
@@ -82,10 +82,9 @@ def log_softmax(
     /,
     *,
     axis: Optional[int] = None,
-    dtype: Optional[torch.dtype] = None,
     out: Optional[torch.Tensor] = None,
 ):
-    return torch.nn.functional.log_softmax(x, dim=axis, dtype=dtype)
+    return torch.nn.functional.log_softmax(x, axis)
 
 
 log_softmax.unsupported_dtypes = ("float16", "bfloat16")
diff --git a/ivy/functional/ivy/activations.py b/ivy/functional/ivy/activations.py
index a7c922657af40..3266248820705 100644
--- a/ivy/functional/ivy/activations.py
+++ b/ivy/functional/ivy/activations.py
@@ -367,3 +367,35 @@ def softplus(
 
     """
     return current_backend(x).softplus(x, out=out)
+
+
+@to_native_arrays_and_back
+@handle_out_argument
+@handle_nestable
+@handle_exceptions
+def log_softmax(
+    x: Union[ivy.Array, ivy.NativeArray],
+    /,
+    *,
+    axis: Optional[int] = -1,
+    out: Optional[ivy.Array] = None,
+) -> ivy.Array:
+    """Applies the log_softmax function element-wise.
+
+    Parameters
+    ----------
+    x
+        Input array.
+    axis
+        The dimension softmax would be performed on. The default is -1 which indicates
+        the last dimension.
+    out
+        optional output array, for writing the result to. It must have a shape that the
+        inputs broadcast to.
+
+    Returns
+    -------
+    ret
+        The output array with log_softmax applied element-wise to input.
+    """
+    return current_backend(x).log_softmax(x, axis=axis, out=out)

From 7455e544498b14696e7d9fb20cda645767c7902f Mon Sep 17 00:00:00 2001
From: Felix Hirwa Nshuti
Date: Thu, 22 Sep 2022 13:52:34 +0000
Subject: [PATCH 4/8] Added instance methods for log_softmax

---
 ivy/array/activations.py     |  26 +++++++
 ivy/container/activations.py | 138 +++++++++++++++++++++++++++++++++++
 2 files changed, 164 insertions(+)

diff --git a/ivy/array/activations.py b/ivy/array/activations.py
index fb7f4d7c2b20b..9d4a49b1bf4c7 100644
--- a/ivy/array/activations.py
+++ b/ivy/array/activations.py
@@ -135,3 +135,29 @@ def softplus(
 
         """
        return ivy.softplus(self._data, beta=beta, threshold=threshold, out=out)
+
+    def log_softmax(
+        self: ivy.Array,
+        /,
+        *,
+        axis: Optional[int] = None,
+        out: Optional[ivy.Array] = None,
+    ) -> ivy.Array:
+        """
+        ivy.Array instance method variant of ivy.log_softmax.
+        This method simply wraps the function,
+        and so the docstring for ivy.log_softmax also applies to this method
+        with minimal changes.
+
+        Examples
+        --------
+        >>> x = ivy.array([-1.0, -0.98, 2.3])
+        >>> y = x.log_softmax()
+        >>> print(y)
+        ivy.array([-3.37, -3.35, -0.0719])
+
+        >>> x = ivy.array([2.0, 3.4, -4.2])
+        >>> print(x.log_softmax())
+        ivy.array([-1.62, -0.221, -7.82 ])
+        """
+        return ivy.log_softmax(self._data, axis=axis, out=out)
diff --git a/ivy/container/activations.py b/ivy/container/activations.py
index da62c25beeaba..9a0fd103c4b8e 100644
--- a/ivy/container/activations.py
+++ b/ivy/container/activations.py
@@ -758,3 +758,141 @@ def softplus(
             map_sequences=map_sequences,
             out=out,
         )
+
+    @staticmethod
+    def static_log_softmax(
+        x: Union[ivy.Array, ivy.NativeArray, ivy.Container],
+        /,
+        *,
+        axis: Optional[ivy.Container] = None,
+        key_chains: Optional[Union[List[str], Dict[str, str]]] = None,
+        to_apply: bool = True,
+        prune_unapplied: bool = False,
+        map_sequences: bool = False,
+        out: Optional[ivy.Container] = None,
+    ) -> ivy.Container:
+        """
+        ivy.Container static method variant of ivy.log_softmax.
+        This method simply wraps the function, and so the docstring
+        for ivy.log_softmax also applies to this method with minimal changes.
+
+        Parameters
+        ----------
+        x
+            input container.
+        axis
+            the axis or axes along which the log_softmax should be computed
+        key_chains
+            The key-chains to apply or not apply the method to. Default is None.
+        to_apply
+            If True, the method will be applied to key_chains, otherwise key_chains
+            will be skipped. Default is True.
+        prune_unapplied
+            Whether to prune key_chains for which the function was not applied.
+            Default is False.
+        map_sequences
+            Whether to also map method to sequences (lists, tuples). Default is False.
+        out
+            optional output container, for writing the result to. It must have a shape
+            that the inputs broadcast to.
+
+        Returns
+        -------
+        ret
+            a container with the log_softmax function applied element-wise.
+
+        Examples
+        --------
+        >>> x = ivy.Container(a=ivy.array([-1.0, -0.98, 2.3]))
+        >>> y = ivy.Container.static_log_softmax(x)
+        >>> print(y)
+        {
+            a: ivy.array([-3.37, -3.35, -0.0719])
+        }
+
+        >>> x = ivy.Container(a=ivy.array([1.0, 2.4]), b=ivy.array([-0.2, -1.0]))
+        >>> y = ivy.Container.static_log_softmax(x)
+        >>> print(y)
+        {
+            a: ivy.array([-1.62, -0.22]),
+            b: ivy.array([-0.371, -1.17])
+        }
+        """
+        return ContainerBase.multi_map_in_static_method(
+            "log_softmax",
+            x,
+            axis=axis,
+            key_chains=key_chains,
+            to_apply=to_apply,
+            prune_unapplied=prune_unapplied,
+            map_sequences=map_sequences,
+            out=out,
+        )
+
+    def log_softmax(
+        self: ivy.Container,
+        /,
+        *,
+        axis: Optional[ivy.Container] = None,
+        key_chains: Optional[Union[List[str], Dict[str, str]]] = None,
+        to_apply: bool = True,
+        prune_unapplied: bool = False,
+        map_sequences: bool = False,
+        out: Optional[ivy.Container] = None,
+    ):
+        """
+        ivy.Container instance method variant of ivy.log_softmax.
+        This method simply wraps the function, and so the docstring
+        for ivy.log_softmax also applies to this method with minimal changes.
+
+        Parameters
+        ----------
+        self
+            input container.
+        axis
+            the axis or axes along which the log_softmax should be computed
+        key_chains
+            The key-chains to apply or not apply the method to. Default is None.
+        to_apply
+            If True, the method will be applied to key_chains, otherwise key_chains
+            will be skipped. Default is True.
+        prune_unapplied
+            Whether to prune key_chains for which the function was not applied.
+            Default is False.
+        map_sequences
+            Whether to also map method to sequences (lists, tuples). Default is False.
+        out
+            optional output container, for writing the result to. It must have a shape
+            that the inputs broadcast to.
+
+        Returns
+        -------
+        ret
+            a container with the log_softmax function applied element-wise.
+
+        Examples
+        --------
+        >>> x = ivy.Container(a=ivy.array([-1.0, -0.98, 2.3]))
+        >>> y = x.log_softmax()
+        >>> print(y)
+        {
+            a: ivy.array([-3.37, -3.35, -0.0719])
+        }
+
+        >>> x = ivy.Container(a=ivy.array([1.0, 2.4]), b=ivy.array([-0.2, -1.0]))
+        >>> y = x.log_softmax()
+        >>> print(y)
+        {
+            a: ivy.array([-1.62, -0.22]),
+            b: ivy.array([-0.371, -1.17])
+        }
+        """
+        return self.static_log_softmax(
+            self,
+            axis=axis,
+            key_chains=key_chains,
+            to_apply=to_apply,
+            prune_unapplied=prune_unapplied,
+            map_sequences=map_sequences,
+            out=out,
+        )

From 99e07947471af472b62c887db53656d22991dcbf Mon Sep 17 00:00:00 2001
From: Felix Hirwa Nshuti
Date: Thu, 22 Sep 2022 14:04:11 +0000
Subject: [PATCH 5/8] added functional examples to log_softmax

---
 ivy/functional/ivy/activations.py | 39 +++++++++++++++++++++++++++
 1 file changed, 39 insertions(+)

diff --git a/ivy/functional/ivy/activations.py b/ivy/functional/ivy/activations.py
index 6221e8b673c56..967f0d443a80d 100644
--- a/ivy/functional/ivy/activations.py
+++ b/ivy/functional/ivy/activations.py
@@ -415,5 +415,44 @@ def log_softmax(
     -------
     ret
         The output array with log_softmax applied element-wise to input.
+
+    Examples
+    --------
+
+    With :code:`ivy.Array` input:
+
+    >>> x = ivy.array([-1.0, -0.98])
+    >>> y = ivy.log_softmax(x)
+    >>> print(y)
+    ivy.array([-0.703, -0.683])
+
+    >>> x = ivy.array([1.0, 2.0, 3.0])
+    >>> y = ivy.log_softmax(x)
+    >>> print(y)
+    ivy.array([-2.41, -1.41, -0.408])
+
+    With :code:`ivy.NativeArray` input:
+
+    >>> x = ivy.native_array([1.5, 0.5, 1.0])
+    >>> y = ivy.log_softmax(x)
+    >>> print(y)
+    ivy.array([-0.68, -1.68, -1.18])
+
+    With :code:`ivy.Container` input:
+
+    >>> x = ivy.Container(a=ivy.array([1.5, 0.5, 1.0]))
+    >>> y = ivy.log_softmax(x)
+    >>> print(y)
+    {
+        a: ivy.array([-0.68, -1.68, -1.18])
+    }
+
+    >>> x = ivy.Container(a=ivy.array([1.0, 2.0]), b=ivy.array([0.4, -0.2]))
+    >>> y = ivy.log_softmax(x)
+    >>> print(y)
+    {
+        a: ivy.array([-1.31, -0.313]),
+        b: ivy.array([-0.437, -1.04])
+    }
     """
     return current_backend(x).log_softmax(x, axis=axis, out=out)

From f6c2869101a8f422ec7d4abc8c01a4669c4c037d Mon Sep 17 00:00:00 2001
From: Felix Hirwa Nshuti
Date: Thu, 22 Sep 2022 14:26:16 +0000
Subject: [PATCH 6/8] Added unit test for log_softmax

---
 .../test_nn/test_activations.py | 43 +++++++++++++++++++
 1 file changed, 43 insertions(+)

diff --git a/ivy_tests/test_ivy/test_functional/test_nn/test_activations.py b/ivy_tests/test_ivy/test_functional/test_nn/test_activations.py
index 87c7bde4997a0..817610c671a47 100644
--- a/ivy_tests/test_ivy/test_functional/test_nn/test_activations.py
+++ b/ivy_tests/test_ivy/test_functional/test_nn/test_activations.py
@@ -264,3 +264,46 @@ def test_softplus(
         beta=beta,
         threshold=threshold,
     )
+
+
+# log_softmax
+@handle_cmd_line_args
+@given(
+    dtype_and_x=helpers.dtype_and_values(
+        available_dtypes=helpers.get_dtypes("float"),
+        min_num_dims=1,
+        large_abs_safety_factor=8,
+        small_abs_safety_factor=8,
+        safety_factor_scale="log",
+    ),
+    axis=helpers.ints(min_value=-1, max_value=0),
+    num_positional_args=helpers.num_positional_args(fn_name="log_softmax"),
+)
+def test_log_softmax(
+    *,
+    dtype_and_x,
+    as_variable,
+    axis,
+    with_out,
+    num_positional_args,
+    container,
+    instance_method,
+    native_array,
+    fw,
+):
+    dtype, x = dtype_and_x
+    helpers.test_function(
+        input_dtypes=dtype,
+        as_variable_flags=as_variable,
+        with_out=with_out,
+        native_array_flags=native_array,
+        fw=fw,
+        num_positional_args=num_positional_args,
+        container_flags=container,
+        instance_method=instance_method,
+        fn_name="log_softmax",
+        rtol_=1e-02,
+        atol_=1e-02,
+        x=np.asarray(x, dtype=dtype),
+        axis=axis,
+    )

From d544b3c66d4fb99eee2df4ddadbed63073ac3411 Mon Sep 17 00:00:00 2001
From: Felix Hirwa Nshuti
Date: Thu, 22 Sep 2022 14:31:25 +0000
Subject: [PATCH 7/8] typo fix in docstring

---
 ivy/functional/ivy/activations.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/ivy/functional/ivy/activations.py b/ivy/functional/ivy/activations.py
index 967f0d443a80d..d50228ec2ee5b 100644
--- a/ivy/functional/ivy/activations.py
+++ b/ivy/functional/ivy/activations.py
@@ -405,8 +405,8 @@ def log_softmax(
     x
         Input array.
     axis
-        The dimension softmax would be performed on. The default is -1 which indicates
-        the last dimension.
+        The dimension log_softmax would be performed on. The default is -1
+        which indicates the last dimension.
     out
         optional output array, for writing the result to. It must have a shape that the
         inputs broadcast to.
From d30fe7998d1ac61ccd3f10fc57ae39ce958503d9 Mon Sep 17 00:00:00 2001
From: Felix Hirwa Nshuti
Date: Fri, 23 Sep 2022 16:23:40 +0000
Subject: [PATCH 8/8] docstring fix

---
 ivy/functional/ivy/activations.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/ivy/functional/ivy/activations.py b/ivy/functional/ivy/activations.py
index d50228ec2ee5b..0baf1c8d3ca18 100644
--- a/ivy/functional/ivy/activations.py
+++ b/ivy/functional/ivy/activations.py
@@ -418,7 +418,6 @@ def log_softmax(
 
     Examples
     --------
-
     With :code:`ivy.Array` input:
 
     >>> x = ivy.array([-1.0, -0.98]
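
Reviewer note (not part of the patches above): the short standalone sketch below, in plain NumPy with an illustrative helper name and tolerance, reproduces the max-subtraction trick that the numpy backend hunk in PATCH 3/8 relies on and checks it against the naive log(softmax(x)) definition.

# Standalone check of the numerically stable log-softmax formulation used in
# the numpy backend above. stable_log_softmax is a hypothetical helper name;
# only plain NumPy is required, ivy itself is not imported.
import numpy as np

def stable_log_softmax(x, axis=-1):
    # Subtracting the per-axis max before exponentiating keeps exp() in range
    # for large logits without changing the mathematical result.
    x_max = np.max(x, axis=axis, keepdims=True)
    shifted = x - x_max
    return shifted - np.log(np.sum(np.exp(shifted), axis=axis, keepdims=True))

x = np.array([[2.0, 3.4, -4.2], [1.5, 0.5, 1.0]])
naive = np.log(np.exp(x) / np.sum(np.exp(x), axis=-1, keepdims=True))
assert np.allclose(stable_log_softmax(x), naive, atol=1e-6)
print(np.round(stable_log_softmax(x), 3))  # values line up with the docstring examples above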