From a501d3a0e2290f070a56c7a8ed5505abe12ee68b Mon Sep 17 00:00:00 2001 From: Anton Volkov Date: Tue, 5 Nov 2024 13:44:20 +0100 Subject: [PATCH 01/29] Add test_typing.py --- tests/third_party/cupy/test_typing.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 tests/third_party/cupy/test_typing.py diff --git a/tests/third_party/cupy/test_typing.py b/tests/third_party/cupy/test_typing.py new file mode 100644 index 00000000000..2256ff4ab82 --- /dev/null +++ b/tests/third_party/cupy/test_typing.py @@ -0,0 +1,12 @@ +import pytest + +import dpnp as cupy + + +@pytest.mark.skip("dpnp.typing is not implemented yet") +class TestClassGetItem: + + def test_class_getitem(self): + from typing import Any + + cupy.ndarray[Any, Any] From 3a73b4c77e51891c31a9e76f318318e786fc1e03 Mon Sep 17 00:00:00 2001 From: Anton Volkov Date: Tue, 5 Nov 2024 13:26:50 +0100 Subject: [PATCH 02/29] Update test_type_routines.py --- tests/third_party/cupy/test_type_routines.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/tests/third_party/cupy/test_type_routines.py b/tests/third_party/cupy/test_type_routines.py index c1e39a19cd0..80878afeaa0 100644 --- a/tests/third_party/cupy/test_type_routines.py +++ b/tests/third_party/cupy/test_type_routines.py @@ -31,6 +31,7 @@ def _generate_type_routines_input(xp, dtype, obj_type): ) ) class TestCanCast(unittest.TestCase): + @testing.for_all_dtypes_combination(names=("from_dtype", "to_dtype")) @testing.numpy_cupy_equal() def test_can_cast(self, xp, from_dtype, to_dtype): @@ -48,10 +49,11 @@ def test_can_cast(self, xp, from_dtype, to_dtype): @pytest.mark.skip("dpnp.common_type() is not implemented yet") class TestCommonType(unittest.TestCase): + @testing.numpy_cupy_equal() def test_common_type_empty(self, xp): ret = xp.common_type() - assert type(ret) == type + assert type(ret) is type return ret @testing.for_all_dtypes(no_bool=True) @@ -59,7 +61,7 @@ def test_common_type_empty(self, xp): def test_common_type_single_argument(self, xp, dtype): array = _generate_type_routines_input(xp, dtype, "array") ret = xp.common_type(array) - assert type(ret) == type + assert type(ret) is type return ret @testing.for_all_dtypes_combination( @@ -70,7 +72,7 @@ def test_common_type_two_arguments(self, xp, dtype1, dtype2): array1 = _generate_type_routines_input(xp, dtype1, "array") array2 = _generate_type_routines_input(xp, dtype2, "array") ret = xp.common_type(array1, array2) - assert type(ret) == type + assert type(ret) is type return ret @testing.for_all_dtypes() @@ -91,6 +93,7 @@ def test_common_type_bool(self, dtype): ) ) class TestResultType(unittest.TestCase): + @testing.for_all_dtypes_combination(names=("dtype1", "dtype2")) @testing.numpy_cupy_equal() def test_result_type(self, xp, dtype1, dtype2): From 9f96cc176603c06607c9f0ce42d21f65f3717357 Mon Sep 17 00:00:00 2001 From: Anton Volkov Date: Tue, 5 Nov 2024 13:38:01 +0100 Subject: [PATCH 03/29] Add test_numpy_interop.py --- tests/third_party/cupy/test_numpy_interop.py | 195 +++++++++++++++++++ 1 file changed, 195 insertions(+) create mode 100644 tests/third_party/cupy/test_numpy_interop.py diff --git a/tests/third_party/cupy/test_numpy_interop.py b/tests/third_party/cupy/test_numpy_interop.py new file mode 100644 index 00000000000..2999cf84092 --- /dev/null +++ b/tests/third_party/cupy/test_numpy_interop.py @@ -0,0 +1,195 @@ +import contextlib +import os +import unittest + +import numpy +import pytest + +import dpnp as cupy +from tests.third_party.cupy import testing + +# import cupyx + +try: + 
import scipy.sparse + + scipy_available = True +except ImportError: + scipy_available = False + + +@pytest.mark.skip("dpnp.get_array_module() is not supported") +class TestGetArrayModule(unittest.TestCase): + + def test_get_array_module_1(self): + n1 = numpy.array([2], numpy.float32) + c1 = cupy.array([2], numpy.float32) + csr1 = cupyx.scipy.sparse.csr_matrix((5, 3), dtype=numpy.float32) + + assert numpy is cupy.get_array_module() + assert numpy is cupy.get_array_module(n1) + assert cupy is cupy.get_array_module(c1) + assert cupy is cupy.get_array_module(csr1) + + assert numpy is cupy.get_array_module(n1, n1) + assert cupy is cupy.get_array_module(c1, c1) + assert cupy is cupy.get_array_module(csr1, csr1) + + assert cupy is cupy.get_array_module(n1, csr1) + assert cupy is cupy.get_array_module(csr1, n1) + assert cupy is cupy.get_array_module(c1, n1) + assert cupy is cupy.get_array_module(n1, c1) + assert cupy is cupy.get_array_module(c1, csr1) + assert cupy is cupy.get_array_module(csr1, c1) + + if scipy_available: + csrn1 = scipy.sparse.csr_matrix((5, 3), dtype=numpy.float32) + + assert numpy is cupy.get_array_module(csrn1) + assert cupy is cupy.get_array_module(csrn1, csr1) + assert cupy is cupy.get_array_module(csr1, csrn1) + assert cupy is cupy.get_array_module(c1, csrn1) + assert cupy is cupy.get_array_module(csrn1, c1) + assert numpy is cupy.get_array_module(n1, csrn1) + assert numpy is cupy.get_array_module(csrn1, n1) + + +class MockArray(numpy.lib.mixins.NDArrayOperatorsMixin): + __array_priority__ = 20 # less than cupy.ndarray.__array_priority__ + + def __array_ufunc__(self, ufunc, method, *inputs, **kwargs): + assert method == "__call__" + name = ufunc.__name__ + return name, inputs, kwargs + + +@pytest.mark.skip("dpnp.__array_ufunc__ is not supported") +class TestArrayUfunc: + + def test_add(self): + x = cupy.array([3, 7]) + y = MockArray() + assert x + y == ("add", (x, y), {}) + assert y + x == ("add", (y, x), {}) + y2 = y + y2 += x + assert y2 == ("add", (y, x), {"out": y}) + with pytest.raises(TypeError): + x += y + + @pytest.mark.xfail( + reason="cupy.ndarray.__array_ufunc__ does not support gufuncs yet" + ) + def test_matmul(self): + x = cupy.array([3, 7]) + y = MockArray() + assert x @ y == ("matmul", (x, y), {}) + assert y @ x == ("matmul", (y, x), {}) + y2 = y + y2 @= x + assert y2 == ("matmul", (y, x), {"out": y}) + with pytest.raises(TypeError): + x @= y + + def test_lt(self): + x = cupy.array([3, 7]) + y = MockArray() + assert (x < y) == ("less", (x, y), {}) + assert (y < x) == ("less", (y, x), {}) + + +class MockArray2: + __array_ufunc__ = None + + def __add__(self, other): + return "add" + + def __radd__(self, other): + return "radd" + + def __matmul__(self, other): + return "matmul" + + def __rmatmul__(self, other): + return "rmatmul" + + def __lt__(self, other): + return "lt" + + def __gt__(self, other): + return "gt" + + +@pytest.mark.skip("dpnp.__array_ufunc__ is not supported") +class TestArrayUfuncOptout: + + def test_add(self): + x = cupy.array([3, 7]) + y = MockArray2() + assert x + y == "radd" + assert y + x == "add" + + def test_matmul(self): + x = cupy.array([3, 7]) + y = MockArray2() + assert x @ y == "rmatmul" + assert y @ x == "matmul" + + def test_lt(self): + x = cupy.array([3, 7]) + y = MockArray2() + assert (x < y) == "gt" + assert (y < x) == "lt" + + +class TestAsnumpy: + + def test_asnumpy(self): + x = testing.shaped_random((2, 3, 4), cupy, cupy.float64) + y = cupy.asnumpy(x) + testing.assert_array_equal(x, y) + + @pytest.mark.skip("out keyword is 
not supported") + def test_asnumpy_out(self): + x = testing.shaped_random((2, 3, 4), cupy, cupy.float64) + y = cupyx.empty_like_pinned(x) + y = cupy.asnumpy(x, out=y) + testing.assert_array_equal(x, y) + assert isinstance(y.base, cupy.cuda.PinnedMemoryPointer) + assert y.base.ptr == y.ctypes.data + + @pytest.mark.skip("blocking keyword is not supported") + @pytest.mark.skipif( + int(os.environ.get("CUPY_ENABLE_UMP", 0)) == 1, + reason="blocking or not is irrelevant when zero-copy is on", + ) + @pytest.mark.parametrize("blocking", (True, False)) + def test_asnumpy_blocking(self, blocking): + prefactor = 4 + a = cupy.random.random( + prefactor * 128 * 1024 * 1024, dtype=cupy.float64 + ) + cupy.cuda.Device().synchronize() + + # Idea: perform D2H copy on a nonblocking stream, during which we try + # to "corrupt" the host data via NumPy operation. If the copy is + # properly ordered, corruption would not be possible. Here we craft a + # problem size and use pinned memory to ensure the failure can be + # always triggered. (The CUDART API reference ("API synchronization + # behavior") states that copying between device and pageable memory + # "might be" synchronous, whereas between device and page-locked + # memory "should be" fully asynchronous.) + s = cupy.cuda.Stream(non_blocking=True) + with s: + c = cupyx.empty_pinned(a.shape, dtype=a.dtype) + cupy.asnumpy(a, out=c, blocking=blocking) + c[c.size // 2 :] = -1.0 # potential data race + s.synchronize() + + a[c.size // 2 :] = -1.0 + if not blocking: + ctx = pytest.raises(AssertionError) + else: + ctx = contextlib.nullcontext() + with ctx: + assert cupy.allclose(a, c) From 40c9ba05de846c82396a7871803d233c6d44dca4 Mon Sep 17 00:00:00 2001 From: Anton Volkov Date: Tue, 5 Nov 2024 13:47:09 +0100 Subject: [PATCH 04/29] Update test_ndim.py --- tests/third_party/cupy/test_ndim.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/third_party/cupy/test_ndim.py b/tests/third_party/cupy/test_ndim.py index 0323ebb9794..446fadf524b 100644 --- a/tests/third_party/cupy/test_ndim.py +++ b/tests/third_party/cupy/test_ndim.py @@ -7,6 +7,7 @@ class TestNdim(unittest.TestCase): + @testing.numpy_cupy_equal() def test_ndim_ndarray1d(self, xp): return xp.ndim(xp.arange(5)) From 681f7ae1890d8d78260fb6406a1d40f1be7f8244 Mon Sep 17 00:00:00 2001 From: Anton Volkov Date: Tue, 5 Nov 2024 13:57:45 +0100 Subject: [PATCH 05/29] Add test_init.py --- tests/third_party/cupy/test_init.py | 171 ++++++++++++++++++++++++++++ 1 file changed, 171 insertions(+) create mode 100644 tests/third_party/cupy/test_init.py diff --git a/tests/third_party/cupy/test_init.py b/tests/third_party/cupy/test_init.py new file mode 100644 index 00000000000..97be323a4a3 --- /dev/null +++ b/tests/third_party/cupy/test_init.py @@ -0,0 +1,171 @@ +import operator +import os +import shutil +import subprocess +import sys +import tempfile +import unittest +from unittest import mock + +import numpy +import pytest + +import dpnp as cupy +from tests.third_party.cupy import testing + + +def _run_script(code): + # subprocess is required not to interfere with cupy module imported in top + # of this file + temp_dir = tempfile.mkdtemp() + try: + script_path = os.path.join(temp_dir, "script.py") + with open(script_path, "w") as f: + f.write(code) + proc = subprocess.Popen( + [sys.executable, script_path], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + stdoutdata, stderrdata = proc.communicate() + finally: + shutil.rmtree(temp_dir, ignore_errors=True) + return proc.returncode, stdoutdata, stderrdata + + 
+def _test_cupy_available(self): + returncode, stdoutdata, stderrdata = _run_script( + """ +import dpnp as cupy +print(cupy.is_available())""" + ) + assert returncode == 0, "stderr: {!r}".format(stderrdata) + assert stdoutdata in (b"True\n", b"True\r\n", b"False\n", b"False\r\n") + return stdoutdata == b"True\n" or stdoutdata == b"True\r\n" + + +class TestImportError(unittest.TestCase): + + def test_import_error(self): + returncode, stdoutdata, stderrdata = _run_script( + """ +try: + import dpnp as cupy +except Exception as e: + print(type(e).__name__) +""" + ) + assert returncode == 0, "stderr: {!r}".format(stderrdata) + assert stdoutdata in (b"", b"RuntimeError\n") + + +# if not cupy.cuda.runtime.is_hip: +# visible = "CUDA_VISIBLE_DEVICES" +# else: +# visible = "HIP_VISIBLE_DEVICES" + + +@pytest.mark.skip("dpnp.is_available() is not implemented") +class TestAvailable(unittest.TestCase): + + def test_available(self): + available = _test_cupy_available(self) + assert available + + +@pytest.mark.skip("dpnp.is_available() is not implemented") +class TestNotAvailable(unittest.TestCase): + + def setUp(self): + self.old = os.environ.get(visible) + + def tearDown(self): + if self.old is None: + os.environ.pop(visible) + else: + os.environ[visible] = self.old + + # @unittest.skipIf( + # cupy.cuda.runtime.is_hip, + # "HIP handles empty HIP_VISIBLE_DEVICES differently", + # ) + def test_no_device_1(self): + os.environ["CUDA_VISIBLE_DEVICES"] = " " + available = _test_cupy_available(self) + assert not available + + def test_no_device_2(self): + os.environ[visible] = "-1" + available = _test_cupy_available(self) + assert not available + + +@pytest.mark.skip("No memory pool API is supported") +class TestMemoryPool(unittest.TestCase): + + def test_get_default_memory_pool(self): + p = cupy.get_default_memory_pool() + assert isinstance(p, cupy.cuda.memory.MemoryPool) + + def test_get_default_pinned_memory_pool(self): + p = cupy.get_default_pinned_memory_pool() + assert isinstance(p, cupy.cuda.pinned_memory.PinnedMemoryPool) + + +@pytest.mark.skip("dpnp.show_config() is not implemented") +class TestShowConfig(unittest.TestCase): + + def test_show_config(self): + with mock.patch("sys.stdout.write") as write_func: + cupy.show_config() + write_func.assert_called_once_with( + str(cupyx.get_runtime_info(full=False)) + ) + + def test_show_config_with_handles(self): + with mock.patch("sys.stdout.write") as write_func: + cupy.show_config(_full=True) + write_func.assert_called_once_with( + str(cupyx.get_runtime_info(full=True)) + ) + + +class TestAliases(unittest.TestCase): + + def test_abs_is_absolute(self): + for xp in (numpy, cupy): + assert xp.abs is xp.absolute + + def test_conj_is_conjugate(self): + for xp in (numpy, cupy): + assert xp.conj is xp.conjugate + + def test_bitwise_not_is_invert(self): + for xp in (numpy, cupy): + assert xp.bitwise_not is xp.invert + + +@testing.with_requires("numpy>=2.0") +@pytest.mark.parametrize( + "name", + [ + "exceptions.AxisError", + "exceptions.ComplexWarning", + "exceptions.ModuleDeprecationWarning", + "exceptions.RankWarning", + "exceptions.TooHardError", + "exceptions.VisibleDeprecationWarning", + "linalg.LinAlgError", + ], +) +def test_error_classes(name): + get = operator.attrgetter(name) + assert issubclass(get(cupy), get(numpy)) + + +# This is copied from chainer/testing/__init__.py, so should be replaced in +# some way. 
+if __name__ == "__main__": + import pytest + + pytest.main([__file__, "-vvs", "-x", "--pdb"]) From a4a89f54ff0730ede3dc711ac038e875337b411f Mon Sep 17 00:00:00 2001 From: Anton Volkov Date: Tue, 5 Nov 2024 14:07:54 +0100 Subject: [PATCH 06/29] Add __init__.py --- tests/third_party/cupy/__init__.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 tests/third_party/cupy/__init__.py diff --git a/tests/third_party/cupy/__init__.py b/tests/third_party/cupy/__init__.py new file mode 100644 index 00000000000..e69de29bb2d From 46b166a460b372a0620976b401b037a68b5f1bee Mon Sep 17 00:00:00 2001 From: Anton Volkov Date: Tue, 5 Nov 2024 14:15:51 +0100 Subject: [PATCH 07/29] Update statistics_tests/test_order.py --- .../cupy/statistics_tests/test_order.py | 242 +++++++++++++++--- 1 file changed, 211 insertions(+), 31 deletions(-) diff --git a/tests/third_party/cupy/statistics_tests/test_order.py b/tests/third_party/cupy/statistics_tests/test_order.py index 3805933d490..efaadb30a88 100644 --- a/tests/third_party/cupy/statistics_tests/test_order.py +++ b/tests/third_party/cupy/statistics_tests/test_order.py @@ -7,19 +7,19 @@ from tests.third_party.cupy import testing _all_methods = ( - # "inverted_cdf", # TODO(takagi) Not implemented - # "averaged_inverted_cdf", # TODO(takagi) Not implemented - # "closest_observation", # TODO(takagi) Not implemented - # "interpolated_inverted_cdf", # TODO(takagi) Not implemented - # "hazen", # TODO(takagi) Not implemented - # "weibull", # TODO(takagi) Not implemented + # 'inverted_cdf', # TODO(takagi) Not implemented + # 'averaged_inverted_cdf', # TODO(takagi) Not implemented + # 'closest_observation', # TODO(takagi) Not implemented + # 'interpolated_inverted_cdf', # TODO(takagi) Not implemented + # 'hazen', # TODO(takagi) Not implemented + # 'weibull', # TODO(takagi) Not implemented "linear", - # "median_unbiased", # TODO(takagi) Not implemented - # "normal_unbiased", # TODO(takagi) Not implemented + # 'median_unbiased', # TODO(takagi) Not implemented + # 'normal_unbiased', # TODO(takagi) Not implemented "lower", "higher", "midpoint", - # "nearest", # TODO(hvy): Not implemented + "nearest", ) @@ -27,9 +27,61 @@ def for_all_methods(name="method"): return pytest.mark.parametrize(name, _all_methods) +@pytest.mark.skip("dpnp.quantile() is not implemented yet") @testing.with_requires("numpy>=1.22.0rc1") -class TestOrder: - @for_all_methods() +class TestQuantile: + + @testing.for_all_dtypes(no_float16=True, no_bool=True, no_complex=True) + def test_percentile_unexpected_method(self, dtype): + for xp in (numpy, cupy): + a = testing.shaped_random((4, 2, 3, 2), xp, dtype) + q = testing.shaped_random((5,), xp, dtype=dtype, scale=100) + with pytest.raises(ValueError): + xp.percentile(a, q, axis=-1, method="deadbeef") + + # See gh-4453 + @testing.for_float_dtypes() + def test_percentile_memory_access(self, dtype): + # Create an allocator that guarantees array allocated in + # cupy.percentile call will be followed by a NaN + original_allocator = cuda.get_allocator() + + def controlled_allocator(size): + memptr = original_allocator(size) + base_size = memptr.mem.size + assert base_size % 512 == 0 + item_size = dtype().itemsize + shape = (base_size // item_size,) + x = cupy.ndarray(memptr=memptr, shape=shape, dtype=dtype) + x.fill(cupy.nan) + return memptr + + # Check that percentile still returns non-NaN results + a = testing.shaped_random((5,), cupy, dtype) + q = cupy.array((0, 100), dtype=dtype) + + cuda.set_allocator(controlled_allocator) + try: + percentiles 
= cupy.percentile(a, q, axis=None, method="linear") + finally: + cuda.set_allocator(original_allocator) + + assert not cupy.any(cupy.isnan(percentiles)) + + @testing.for_all_dtypes(no_float16=True, no_bool=True, no_complex=True) + def test_quantile_unexpected_method(self, dtype): + for xp in (numpy, cupy): + a = testing.shaped_random((4, 2, 3, 2), xp, dtype) + q = testing.shaped_random((5,), xp, dtype=dtype, scale=1) + with pytest.raises(ValueError): + xp.quantile(a, q, axis=-1, method="deadbeef") + + +@pytest.mark.skip("dpnp.quantile() is not implemented yet") +@testing.with_requires("numpy>=1.22.0rc1") +@for_all_methods() +class TestQuantileMethods: + @testing.for_all_dtypes(no_float16=True, no_bool=True, no_complex=True) @testing.numpy_cupy_allclose() def test_percentile_defaults(self, xp, dtype, method): @@ -37,7 +89,6 @@ def test_percentile_defaults(self, xp, dtype, method): q = testing.shaped_random((3,), xp, dtype=dtype, scale=100) return xp.percentile(a, q, method=method) - @for_all_methods() @testing.for_all_dtypes(no_float16=True, no_bool=True, no_complex=True) @testing.numpy_cupy_allclose() def test_percentile_q_list(self, xp, dtype, method): @@ -45,7 +96,6 @@ def test_percentile_q_list(self, xp, dtype, method): q = [99, 99.9] return xp.percentile(a, q, method=method) - @for_all_methods() @testing.for_all_dtypes(no_float16=True, no_bool=True, no_complex=True) @testing.numpy_cupy_allclose(rtol=1e-6) def test_percentile_no_axis(self, xp, dtype, method): @@ -53,7 +103,6 @@ def test_percentile_no_axis(self, xp, dtype, method): q = testing.shaped_random((5,), xp, dtype=dtype, scale=100) return xp.percentile(a, q, axis=None, method=method) - @for_all_methods() @testing.for_all_dtypes(no_float16=True, no_bool=True, no_complex=True) @testing.numpy_cupy_allclose(rtol=1e-6) def test_percentile_neg_axis(self, xp, dtype, method): @@ -61,7 +110,6 @@ def test_percentile_neg_axis(self, xp, dtype, method): q = testing.shaped_random((5,), xp, dtype=dtype, scale=100) return xp.percentile(a, q, axis=-1, method=method) - @for_all_methods() @testing.for_all_dtypes(no_float16=True, no_bool=True, no_complex=True) @testing.numpy_cupy_allclose(rtol=1e-6) def test_percentile_tuple_axis(self, xp, dtype, method): @@ -69,7 +117,6 @@ def test_percentile_tuple_axis(self, xp, dtype, method): q = testing.shaped_random((5,), xp, dtype=dtype, scale=100) return xp.percentile(a, q, axis=(0, 1, 2), method=method) - @for_all_methods() @testing.for_all_dtypes(no_float16=True, no_bool=True, no_complex=True) @testing.numpy_cupy_allclose() def test_percentile_scalar_q(self, xp, dtype, method): @@ -77,7 +124,6 @@ def test_percentile_scalar_q(self, xp, dtype, method): q = 13.37 return xp.percentile(a, q, method=method) - @for_all_methods() @testing.for_all_dtypes(no_float16=True, no_bool=True, no_complex=True) @testing.numpy_cupy_allclose(rtol=1e-5) def test_percentile_keepdims(self, xp, dtype, method): @@ -85,7 +131,6 @@ def test_percentile_keepdims(self, xp, dtype, method): q = testing.shaped_random((5,), xp, dtype=dtype, scale=100) return xp.percentile(a, q, axis=None, keepdims=True, method=method) - @for_all_methods() @testing.for_float_dtypes(no_float16=True) # NumPy raises error on int8 @testing.numpy_cupy_allclose(rtol=1e-6) def test_percentile_out(self, xp, dtype, method): @@ -94,7 +139,17 @@ def test_percentile_out(self, xp, dtype, method): out = testing.shaped_random((5, 10, 2, 3), xp, dtype) return xp.percentile(a, q, axis=-1, method=method, out=out) - @for_all_methods() + 
@testing.for_float_dtypes(no_float16=True) + @testing.numpy_cupy_allclose(rtol=1e-6) + def test_percentile_overwrite(self, xp, dtype, method): + a = testing.shaped_random((10, 2, 3, 2), xp, dtype) + ap = a.copy() + q = testing.shaped_random((5,), xp, dtype=dtype, scale=100) + res = xp.percentile(ap, q, axis=-1, method=method, overwrite_input=True) + + assert not xp.all(ap == a) + return res + @testing.for_all_dtypes(no_float16=True, no_bool=True, no_complex=True) def test_percentile_bad_q(self, dtype, method): for xp in (numpy, cupy): @@ -104,12 +159,101 @@ def test_percentile_bad_q(self, dtype, method): xp.percentile(a, q, axis=-1, method=method) @testing.for_all_dtypes(no_float16=True, no_bool=True, no_complex=True) - def test_percentile_unxpected_method(self, dtype): + def test_percentile_out_of_range_q(self, dtype, method): for xp in (numpy, cupy): a = testing.shaped_random((4, 2, 3, 2), xp, dtype) - q = testing.shaped_random((5,), xp, dtype=dtype, scale=100) + for q in [[-0.1], [100.1]]: + with pytest.raises(ValueError): + xp.percentile(a, q, axis=-1, method=method) + + @testing.for_all_dtypes() + @testing.for_all_dtypes(no_float16=True, no_bool=True, no_complex=True) + @testing.numpy_cupy_allclose() + def test_quantile_defaults(self, xp, dtype, method): + a = testing.shaped_random((2, 3, 8), xp, dtype) + q = testing.shaped_random((3,), xp, scale=1) + return xp.quantile(a, q, method=method) + + @testing.for_all_dtypes(no_float16=True, no_bool=True, no_complex=True) + @testing.numpy_cupy_allclose() + def test_quantile_q_list(self, xp, dtype, method): + a = testing.shaped_arange((1001,), xp, dtype) + q = [0.99, 0.999] + return xp.quantile(a, q, method=method) + + @testing.for_all_dtypes(no_float16=True, no_bool=True, no_complex=True) + @testing.numpy_cupy_allclose(rtol=1e-5) + def test_quantile_no_axis(self, xp, dtype, method): + a = testing.shaped_random((10, 2, 4, 8), xp, dtype) + q = testing.shaped_random((5,), xp, scale=1) + return xp.quantile(a, q, axis=None, method=method) + + @testing.for_all_dtypes(no_float16=True, no_bool=True, no_complex=True) + @testing.numpy_cupy_allclose(rtol=1e-6) + def test_quantile_neg_axis(self, xp, dtype, method): + a = testing.shaped_random((4, 3, 10, 2, 8), xp, dtype) + q = testing.shaped_random((5,), xp, scale=1) + return xp.quantile(a, q, axis=-1, method=method) + + @testing.for_all_dtypes(no_float16=True, no_bool=True, no_complex=True) + @testing.numpy_cupy_allclose(rtol=1e-6) + def test_quantile_tuple_axis(self, xp, dtype, method): + a = testing.shaped_random((1, 6, 3, 2), xp, dtype) + q = testing.shaped_random((5,), xp, scale=1) + return xp.quantile(a, q, axis=(0, 1, 2), method=method) + + @testing.for_all_dtypes(no_float16=True, no_bool=True, no_complex=True) + @testing.numpy_cupy_allclose() + def test_quantile_scalar_q(self, xp, dtype, method): + a = testing.shaped_random((2, 3, 8), xp, dtype) + q = 0.1337 + return xp.quantile(a, q, method=method) + + @testing.for_all_dtypes(no_float16=True, no_bool=True, no_complex=True) + @testing.numpy_cupy_allclose(rtol=1e-5) + def test_quantile_keepdims(self, xp, dtype, method): + a = testing.shaped_random((7, 2, 9, 2), xp, dtype) + q = testing.shaped_random((5,), xp, scale=1) + return xp.quantile(a, q, axis=None, keepdims=True, method=method) + + @testing.for_float_dtypes(no_float16=True) # NumPy raises error on int8 + @testing.numpy_cupy_allclose(rtol=1e-6) + def test_quantile_out(self, xp, dtype, method): + a = testing.shaped_random((10, 2, 3, 2), xp, dtype) + q = testing.shaped_random((5,), xp, 
dtype=dtype, scale=1) + out = testing.shaped_random((5, 10, 2, 3), xp, dtype) + return xp.quantile(a, q, axis=-1, method=method, out=out) + + @testing.for_float_dtypes(no_float16=True) + @testing.numpy_cupy_allclose(rtol=1e-6) + def test_quantile_overwrite(self, xp, dtype, method): + a = testing.shaped_random((10, 2, 3, 2), xp, dtype) + ap = a.copy() + q = testing.shaped_random((5,), xp, dtype=dtype, scale=1) + + res = xp.quantile(a, q, axis=-1, method=method, overwrite_input=True) + + assert not xp.all(ap == a) + return res + + @testing.for_all_dtypes(no_float16=True, no_bool=True, no_complex=True) + def test_quantile_bad_q(self, dtype, method): + for xp in (numpy, cupy): + a = testing.shaped_random((4, 2, 3, 2), xp, dtype) + q = testing.shaped_random((1, 2, 3), xp, dtype=dtype, scale=1) with pytest.raises(ValueError): - xp.percentile(a, q, axis=-1, method="deadbeef") + xp.quantile(a, q, axis=-1, method=method) + + @testing.for_all_dtypes(no_float16=True, no_bool=True, no_complex=True) + def test_quantile_out_of_range_q(self, dtype, method): + for xp in (numpy, cupy): + a = testing.shaped_random((4, 2, 3, 2), xp, dtype) + for q in [[-0.1], [1.1]]: + with pytest.raises(ValueError): + xp.quantile(a, q, axis=-1, method=method) + + +class TestOrder: @testing.for_all_dtypes(no_complex=True) @testing.numpy_cupy_allclose() @@ -117,25 +261,25 @@ def test_nanmax_all(self, xp, dtype): a = testing.shaped_random((2, 3), xp, dtype) return xp.nanmax(a) - @testing.for_all_dtypes(no_complex=True) + @testing.for_all_dtypes() @testing.numpy_cupy_allclose() def test_nanmax_axis_large(self, xp, dtype): a = testing.shaped_random((3, 1000), xp, dtype) return xp.nanmax(a, axis=0) - @testing.for_all_dtypes(no_complex=True) + @testing.for_all_dtypes() @testing.numpy_cupy_allclose() def test_nanmax_axis0(self, xp, dtype): a = testing.shaped_random((2, 3, 4), xp, dtype) return xp.nanmax(a, axis=0) - @testing.for_all_dtypes(no_complex=True) + @testing.for_all_dtypes() @testing.numpy_cupy_allclose() def test_nanmax_axis1(self, xp, dtype): a = testing.shaped_random((2, 3, 4), xp, dtype) return xp.nanmax(a, axis=1) - @testing.for_all_dtypes(no_complex=True) + @testing.for_all_dtypes() @testing.numpy_cupy_allclose() def test_nanmax_axis2(self, xp, dtype): a = testing.shaped_random((2, 3, 4), xp, dtype) @@ -159,31 +303,31 @@ def test_nanmax_all_nan(self, xp, dtype): assert w[0].category is RuntimeWarning return m - @testing.for_all_dtypes(no_complex=True) + @testing.for_all_dtypes() @testing.numpy_cupy_allclose() def test_nanmin_all(self, xp, dtype): a = testing.shaped_random((2, 3), xp, dtype) return xp.nanmin(a) - @testing.for_all_dtypes(no_complex=True) + @testing.for_all_dtypes() @testing.numpy_cupy_allclose() def test_nanmin_axis_large(self, xp, dtype): a = testing.shaped_random((3, 1000), xp, dtype) return xp.nanmin(a, axis=0) - @testing.for_all_dtypes(no_complex=True) + @testing.for_all_dtypes() @testing.numpy_cupy_allclose() def test_nanmin_axis0(self, xp, dtype): a = testing.shaped_random((2, 3, 4), xp, dtype) return xp.nanmin(a, axis=0) - @testing.for_all_dtypes(no_complex=True) + @testing.for_all_dtypes() @testing.numpy_cupy_allclose() def test_nanmin_axis1(self, xp, dtype): a = testing.shaped_random((2, 3, 4), xp, dtype) return xp.nanmin(a, axis=1) - @testing.for_all_dtypes(no_complex=True) + @testing.for_all_dtypes() @testing.numpy_cupy_allclose() def test_nanmin_axis2(self, xp, dtype): a = testing.shaped_random((2, 3, 4), xp, dtype) @@ -248,3 +392,39 @@ def test_ptp_nan(self, xp, dtype): def 
test_ptp_all_nan(self, xp, dtype): a = xp.array([float("nan"), float("nan")], dtype) return xp.ptp(a) + + +# See gh-4607 +# "Magic" values used in this test were empirically found to result in +# non-monotonicity for less accurate linear interpolation formulas +@pytest.mark.skip("dpnp.percentile() is not implemented yet") +@testing.parameterize( + *testing.product( + { + "magic_value": ( + -29, + -53, + -207, + -16373, + -99999, + ) + } + ) +) +class TestPercentileMonotonic: + + @testing.with_requires("numpy>=1.22.0rc1") + @testing.for_float_dtypes(no_float16=True) + @testing.numpy_cupy_allclose() + def test_percentile_monotonic(self, dtype, xp): + a = testing.shaped_random((5,), xp, dtype) + + a[0] = self.magic_value + a[1] = self.magic_value + q = xp.linspace(0, 100, 21) + percentiles = xp.percentile(a, q, method="linear") + + # Assert that percentile output increases monotonically + assert xp.all(xp.diff(percentiles) >= 0) + + return percentiles From 24979dca0d57b32f82ebe5bbbd572793d1ac4fb5 Mon Sep 17 00:00:00 2001 From: Anton Volkov Date: Tue, 5 Nov 2024 14:33:52 +0100 Subject: [PATCH 08/29] Update statistics_tests/test_meanvar.py --- .../cupy/statistics_tests/test_meanvar.py | 93 +++++++++++++++---- 1 file changed, 77 insertions(+), 16 deletions(-) diff --git a/tests/third_party/cupy/statistics_tests/test_meanvar.py b/tests/third_party/cupy/statistics_tests/test_meanvar.py index 465d0e8e511..953dc930c05 100644 --- a/tests/third_party/cupy/statistics_tests/test_meanvar.py +++ b/tests/third_party/cupy/statistics_tests/test_meanvar.py @@ -1,3 +1,5 @@ +import math + import numpy import pytest from dpctl.tensor._numpy_helper import AxisError @@ -12,6 +14,7 @@ class TestMedian: + @testing.for_all_dtypes() @testing.numpy_cupy_allclose(type_check=has_support_aspect64()) def test_median_noaxis(self, xp, dtype): @@ -61,7 +64,14 @@ def test_median_invalid_axis(self): return xp.median(a, (-a.ndim - 1, 1), keepdims=False) with pytest.raises(AxisError): - return xp.median(a, (0, a.ndim), keepdims=False) + return xp.median( + a, + ( + 0, + a.ndim, + ), + keepdims=False, + ) @testing.for_dtypes("efdFD") @testing.numpy_cupy_allclose() @@ -83,6 +93,7 @@ def test_median_nan(self, xp, dtype): ) ) class TestMedianAxis: + @testing.for_all_dtypes() @testing.numpy_cupy_allclose(type_check=has_support_aspect64()) def test_median_axis_sequence(self, xp, dtype): @@ -90,7 +101,56 @@ def test_median_axis_sequence(self, xp, dtype): return xp.median(a, self.axis, keepdims=self.keepdims) +@pytest.mark.skip("dpnp.nanmedian() is not implemented yet") +@testing.parameterize( + *testing.product( + { + "shape": [(3, 4, 5)], + "axis": [None, 0, 1, -1, (0, 1), (0, 2), (-1, -2), [0, 1]], + "keepdims": [True, False], + "overwrite_input": [True, False], + } + ) +) +class TestNanMedian: + + zero_density = 0.25 + + def _make_array(self, dtype): + dtype = numpy.dtype(dtype) + if dtype.char in "efdFD": + r_dtype = dtype.char.lower() + a = testing.shaped_random(self.shape, numpy, dtype=r_dtype, scale=1) + if dtype.char in "FD": + ai = a + aj = testing.shaped_random( + self.shape, numpy, dtype=r_dtype, scale=1 + ) + ai[ai < math.sqrt(self.zero_density)] = 0 + aj[aj < math.sqrt(self.zero_density)] = 0 + a = ai + 1j * aj + else: + a[a < self.zero_density] = 0 + a = a / a + else: + a = testing.shaped_random(self.shape, numpy, dtype=dtype) + return a + + @testing.for_all_dtypes() + @testing.numpy_cupy_allclose() + def test_nanmedian(self, xp, dtype): + a = xp.array(self._make_array(dtype)) + out = xp.nanmedian( + a, + self.axis, + 
keepdims=self.keepdims, + overwrite_input=self.overwrite_input, + ) + return xp.ascontiguousarray(out) + + class TestAverage: + _multiprocess_can_split_ = True @testing.for_all_dtypes() @@ -115,7 +175,7 @@ def test_average_weights(self, xp, dtype): @testing.for_all_dtypes() @testing.numpy_cupy_allclose(rtol=2e-7, type_check=has_support_aspect64()) @pytest.mark.parametrize( - "axis, weights", [(1, False), (None, True), (1, True)] + "axis,weights", [(1, False), (None, True), (1, True)] ) def test_returned(self, xp, dtype, axis, weights): a = testing.shaped_arange((2, 3), xp, dtype) @@ -147,6 +207,7 @@ def test_average_keepdims_noaxis(self, xp, dtype, returned): class TestMeanVar: + @testing.for_all_dtypes() @testing.numpy_cupy_allclose(type_check=has_support_aspect64()) def test_mean_all(self, xp, dtype): @@ -296,13 +357,14 @@ def test_external_std_axis_ddof(self, xp, dtype): ) ) class TestNanMean: + @testing.for_all_dtypes(no_float16=True) @testing.numpy_cupy_allclose(rtol=1e-6, type_check=has_support_aspect64()) def test_nanmean_without_nan(self, xp, dtype): a = testing.shaped_random(self.shape, xp, dtype) return xp.nanmean(a, axis=self.axis, keepdims=self.keepdims) - @pytest.mark.usefixtures("suppress_mean_empty_slice_numpy_warnings") + @ignore_runtime_warnings @testing.for_all_dtypes(no_float16=True) @testing.numpy_cupy_allclose(rtol=1e-6, type_check=has_support_aspect64()) def test_nanmean_with_nan_float(self, xp, dtype): @@ -316,17 +378,13 @@ def test_nanmean_with_nan_float(self, xp, dtype): class TestNanMeanAdditional: - @pytest.mark.usefixtures("suppress_mean_empty_slice_numpy_warnings") + + @ignore_runtime_warnings @testing.for_all_dtypes(no_float16=True) - @testing.numpy_cupy_allclose(rtol=1e-6, type_check=has_support_aspect64()) + @testing.numpy_cupy_allclose(rtol=1e-6) def test_nanmean_out(self, xp, dtype): a = testing.shaped_random((10, 20, 30), xp, dtype) - # `numpy.mean` allows ``unsafe`` casting while `dpnp.mean` does not. - # So, output data type cannot be the same as input. 
- out_dtype = ( - cupy.default_float_type(a.device) if xp == cupy else numpy.float64 - ) - z = xp.zeros((20, 30), dtype=out_dtype) + z = xp.zeros((20, 30), dtype=dtype) if a.dtype.kind not in "biu": a[1, :] = xp.nan @@ -355,7 +413,7 @@ def test_nanmean_float16(self, xp): a[0][0] = xp.nan return xp.nanmean(a) - @pytest.mark.usefixtures("suppress_mean_empty_slice_numpy_warnings") + @ignore_runtime_warnings @testing.numpy_cupy_allclose(rtol=1e-6, type_check=has_support_aspect64()) def test_nanmean_all_nan(self, xp): a = xp.zeros((3, 4)) @@ -374,7 +432,8 @@ def test_nanmean_all_nan(self, xp): ) ) class TestNanVarStd: - @pytest.mark.usefixtures("suppress_dof_numpy_warnings") + + @ignore_runtime_warnings @testing.for_all_dtypes(no_float16=True) @testing.numpy_cupy_allclose(rtol=1e-6, type_check=has_support_aspect64()) def test_nanvar(self, xp, dtype): @@ -385,7 +444,7 @@ def test_nanvar(self, xp, dtype): a, axis=self.axis, ddof=self.ddof, keepdims=self.keepdims ) - @pytest.mark.usefixtures("suppress_dof_numpy_warnings") + @ignore_runtime_warnings @testing.for_all_dtypes(no_float16=True) @testing.numpy_cupy_allclose(rtol=1e-6, type_check=has_support_aspect64()) def test_nanstd(self, xp, dtype): @@ -398,7 +457,8 @@ def test_nanstd(self, xp, dtype): class TestNanVarStdAdditional: - @pytest.mark.usefixtures("suppress_dof_numpy_warnings") + + @ignore_runtime_warnings @testing.for_all_dtypes(no_float16=True) @testing.numpy_cupy_allclose(rtol=1e-6, type_check=has_support_aspect64()) def test_nanvar_out(self, xp, dtype): @@ -432,7 +492,7 @@ def test_nanvar_float16(self, xp): a[0][0] = xp.nan return xp.nanvar(a, axis=0) - @pytest.mark.usefixtures("suppress_dof_numpy_warnings") + @ignore_runtime_warnings @testing.for_all_dtypes(no_float16=True) @testing.numpy_cupy_allclose(rtol=1e-6, type_check=has_support_aspect64()) def test_nanstd_out(self, xp, dtype): @@ -488,6 +548,7 @@ def test_nanstd_float16(self, xp): "suppress_mean_empty_slice_numpy_warnings", ) class TestProductZeroLength: + @testing.for_all_dtypes(no_complex=True) @testing.numpy_cupy_allclose(type_check=has_support_aspect64()) def test_external_mean_zero_len(self, xp, dtype): From aba974af84d8129df4c8f4b8533a4bfd11c52646 Mon Sep 17 00:00:00 2001 From: Anton Volkov Date: Tue, 5 Nov 2024 14:48:24 +0100 Subject: [PATCH 09/29] Update statistics_tests/test_histogram.py --- .../cupy/statistics_tests/test_histogram.py | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/tests/third_party/cupy/statistics_tests/test_histogram.py b/tests/third_party/cupy/statistics_tests/test_histogram.py index 521bd4062fb..2cd3c37db1e 100644 --- a/tests/third_party/cupy/statistics_tests/test_histogram.py +++ b/tests/third_party/cupy/statistics_tests/test_histogram.py @@ -42,6 +42,7 @@ def for_all_dtypes_combination_bincount(names): class TestHistogram(unittest.TestCase): + @testing.for_all_dtypes(no_bool=True, no_complex=True) @testing.numpy_cupy_allclose(atol=1e-6, type_check=has_support_aspect64()) def test_histogram(self, xp, dtype): @@ -94,7 +95,7 @@ def test_histogram_range_with_density(self, xp, dtype): return h @testing.for_float_dtypes() - @testing.numpy_cupy_allclose(atol=1e-6, type_check=False) + @testing.numpy_cupy_allclose(atol=1e-6, type_check=has_support_aspect64()) def test_histogram_range_with_weights_and_density(self, xp, dtype): a = xp.arange(10, dtype=dtype) + 0.5 w = xp.arange(10, dtype=dtype) + 0.5 @@ -135,7 +136,9 @@ def test_histogram_int_weights_dtype(self, xp, dtype): def test_histogram_float_weights_dtype(self, 
xp, dtype): # Check the type of the returned histogram a = xp.arange(10, dtype=dtype) - h, b = xp.histogram(a, weights=xp.ones(10, dtype=xp.float32)) + h, b = xp.histogram( + a, weights=xp.ones(10, dtype=cupy.default_float_type()) + ) assert xp.issubdtype(h.dtype, xp.floating) return h @@ -363,6 +366,7 @@ def test_bincount_too_small_minlength(self, dtype): ) ) class TestDigitize: + @testing.for_all_dtypes(no_bool=True, no_complex=True) @testing.numpy_cupy_array_equal() def test_digitize(self, xp, dtype): @@ -377,6 +381,7 @@ def test_digitize(self, xp, dtype): @testing.parameterize({"right": True}, {"right": False}) class TestDigitizeNanInf(unittest.TestCase): + @testing.numpy_cupy_array_equal() def test_digitize_nan(self, xp): x = testing.shaped_arange((14,), xp, xp.float32) @@ -446,6 +451,7 @@ def test_searchsorted_minf(self, xp): class TestDigitizeInvalid(unittest.TestCase): + def test_digitize_complex(self): for xp in (numpy, cupy): x = testing.shaped_arange((14,), xp, xp.complex64) @@ -462,6 +468,7 @@ def test_digitize_nd_bins(self): @pytest.mark.skip("histogramdd() is not implemented yet") +# @pytest.mark.skip(reason="XXX: NP2.0: histogramdd dtype") @testing.parameterize( *testing.product( { @@ -481,6 +488,7 @@ def test_digitize_nd_bins(self): ) ) class TestHistogramdd: + @testing.for_all_dtypes(no_bool=True, no_complex=True) @testing.numpy_cupy_allclose(atol=1e-7, rtol=1e-7) def test_histogramdd(self, xp, dtype): @@ -507,6 +515,7 @@ def test_histogramdd(self, xp, dtype): @pytest.mark.skip("histogramdd() is not implemented yet") class TestHistogramddErrors(unittest.TestCase): + def test_histogramdd_invalid_bins(self): for xp in (numpy, cupy): x = testing.shaped_random((16, 2), xp, scale=100) @@ -552,6 +561,7 @@ def test_histogramdd_disallow_arraylike_bins(self): @pytest.mark.skip("histogram2d() is not implemented yet") +# @pytest.mark.skip(reason="XXX: NP2.0: histogram2d dtype") @testing.parameterize( *testing.product( { @@ -564,11 +574,13 @@ def test_histogramdd_disallow_arraylike_bins(self): ) ) class TestHistogram2d: + @testing.for_all_dtypes(no_bool=True, no_complex=True) - @testing.numpy_cupy_allclose(atol=1e-7, rtol=1e-7) + @testing.numpy_cupy_allclose(atol=1e-2, rtol=1e-7) def test_histogram2d(self, xp, dtype): x = testing.shaped_random((100,), xp, dtype, scale=100) y = testing.shaped_random((100,), xp, dtype, scale=100) + if self.bins == "array_list": bins = [xp.arange(0, 100, 4), xp.arange(0, 100, 10)] elif self.bins == "array": @@ -592,6 +604,7 @@ def test_histogram2d(self, xp, dtype): @pytest.mark.skip("histogram2d() is not implemented yet") class TestHistogram2dErrors(unittest.TestCase): + def test_histogram2d_disallow_arraylike_bins(self): x = testing.shaped_random((16,), cupy, scale=100) y = testing.shaped_random((16,), cupy, scale=100) From 64e8c85a9ca63c4d31eafe2afb285d371924fc15 Mon Sep 17 00:00:00 2001 From: Anton Volkov Date: Tue, 5 Nov 2024 15:01:11 +0100 Subject: [PATCH 10/29] Update statistics_tests/test_correlation.py --- tests/skipped_tests.tbl | 43 ------------------- tests/skipped_tests_gpu.tbl | 43 ------------------- .../cupy/statistics_tests/test_correlation.py | 37 ++++++++++------ 3 files changed, 24 insertions(+), 99 deletions(-) diff --git a/tests/skipped_tests.tbl b/tests/skipped_tests.tbl index d0ee8bb0b61..8dc250517be 100644 --- a/tests/skipped_tests.tbl +++ b/tests/skipped_tests.tbl @@ -309,46 +309,3 @@ tests/third_party/cupy/random_tests/test_sample.py::TestRandomIntegers2::test_bo 
tests/third_party/cupy/random_tests/test_sample.py::TestRandomIntegers2::test_bound_2 tests/third_party/cupy/random_tests/test_sample.py::TestRandomIntegers2::test_goodness_of_fit tests/third_party/cupy/random_tests/test_sample.py::TestRandomIntegers2::test_goodness_of_fit_2 - -tests/third_party/cupy/statistics_tests/test_correlation.py::TestCorrcoef::test_corrcoef -tests/third_party/cupy/statistics_tests/test_correlation.py::TestCorrcoef::test_corrcoef_diag_exception -tests/third_party/cupy/statistics_tests/test_correlation.py::TestCorrcoef::test_corrcoef_rowvar -tests/third_party/cupy/statistics_tests/test_correlation.py::TestCorrcoef::test_corrcoef_y - -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_defaults[linear] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_defaults[lower] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_defaults[higher] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_defaults[midpoint] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_q_list[linear] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_q_list[lower] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_q_list[higher] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_q_list[midpoint] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_no_axis[linear] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_no_axis[lower] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_no_axis[higher] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_no_axis[midpoint] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_neg_axis[linear] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_neg_axis[lower] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_neg_axis[higher] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_neg_axis[midpoint] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_tuple_axis[linear] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_tuple_axis[lower] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_tuple_axis[higher] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_tuple_axis[midpoint] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_scalar_q[linear] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_scalar_q[lower] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_scalar_q[higher] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_scalar_q[midpoint] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_keepdims[linear] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_keepdims[lower] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_keepdims[higher] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_keepdims[midpoint] 
-tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_out[linear] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_out[lower] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_out[higher] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_out[midpoint] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_bad_q[linear] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_bad_q[lower] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_bad_q[higher] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_bad_q[midpoint] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_unxpected_method diff --git a/tests/skipped_tests_gpu.tbl b/tests/skipped_tests_gpu.tbl index 82e7c2abee1..233c6b568f8 100644 --- a/tests/skipped_tests_gpu.tbl +++ b/tests/skipped_tests_gpu.tbl @@ -319,46 +319,3 @@ tests/third_party/cupy/random_tests/test_sample.py::TestRandomIntegers2::test_bo tests/third_party/cupy/random_tests/test_sample.py::TestRandomIntegers2::test_bound_2 tests/third_party/cupy/random_tests/test_sample.py::TestRandomIntegers2::test_goodness_of_fit tests/third_party/cupy/random_tests/test_sample.py::TestRandomIntegers2::test_goodness_of_fit_2 - -tests/third_party/cupy/statistics_tests/test_correlation.py::TestCorrcoef::test_corrcoef -tests/third_party/cupy/statistics_tests/test_correlation.py::TestCorrcoef::test_corrcoef_diag_exception -tests/third_party/cupy/statistics_tests/test_correlation.py::TestCorrcoef::test_corrcoef_rowvar -tests/third_party/cupy/statistics_tests/test_correlation.py::TestCorrcoef::test_corrcoef_y - -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_defaults[linear] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_defaults[lower] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_defaults[higher] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_defaults[midpoint] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_q_list[linear] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_q_list[lower] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_q_list[higher] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_q_list[midpoint] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_no_axis[linear] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_no_axis[lower] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_no_axis[higher] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_no_axis[midpoint] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_neg_axis[linear] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_neg_axis[lower] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_neg_axis[higher] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_neg_axis[midpoint] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_tuple_axis[linear] 
-tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_tuple_axis[lower] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_tuple_axis[higher] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_tuple_axis[midpoint] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_scalar_q[linear] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_scalar_q[lower] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_scalar_q[higher] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_scalar_q[midpoint] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_keepdims[linear] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_keepdims[lower] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_keepdims[higher] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_keepdims[midpoint] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_out[linear] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_out[lower] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_out[higher] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_out[midpoint] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_bad_q[linear] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_bad_q[lower] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_bad_q[higher] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_bad_q[midpoint] -tests/third_party/cupy/statistics_tests/test_order.py::TestOrder::test_percentile_unxpected_method diff --git a/tests/third_party/cupy/statistics_tests/test_correlation.py b/tests/third_party/cupy/statistics_tests/test_correlation.py index be6b815629f..59f5f1d4aca 100644 --- a/tests/third_party/cupy/statistics_tests/test_correlation.py +++ b/tests/third_party/cupy/statistics_tests/test_correlation.py @@ -1,16 +1,16 @@ -import sys import unittest import numpy import pytest -from dpctl import select_default_device import dpnp as cupy from tests.helper import has_support_aspect64 from tests.third_party.cupy import testing +@pytest.mark.skip("dpnp.corrcoef() is not implemented yet") class TestCorrcoef(unittest.TestCase): + @testing.for_all_dtypes() @testing.numpy_cupy_allclose() def test_corrcoef(self, xp, dtype): @@ -37,10 +37,16 @@ def test_corrcoef_rowvar(self, xp, dtype): y = testing.shaped_arange((2, 3), xp, dtype) return xp.corrcoef(a, y=y, rowvar=False) + @testing.with_requires("numpy>=1.20") + @testing.for_all_dtypes() + @testing.numpy_cupy_allclose(accept_error=True) + def test_corrcoef_dtype(self, xp, dtype): + a = testing.shaped_arange((2, 3), xp, dtype) + y = testing.shaped_arange((2, 3), xp, dtype) + return xp.corrcoef(a, y=y, dtype=dtype) + class TestCov(unittest.TestCase): - # resulting dtype will differ with numpy if no fp64 support by a default device - _has_fp64 = select_default_device().has_aspect_fp64 def generate_input(self, a_shape, y_shape, xp, dtype): a = testing.shaped_arange(a_shape, xp, dtype) @@ -50,7 +56,9 @@ def generate_input(self, a_shape, y_shape, xp, dtype): return a, y @testing.for_all_dtypes() - 
@testing.numpy_cupy_allclose(type_check=_has_fp64, accept_error=True) + @testing.numpy_cupy_allclose( + type_check=has_support_aspect64(), accept_error=True + ) def check( self, a_shape, @@ -153,6 +161,7 @@ def test_cov_empty(self): ) @pytest.mark.usefixtures("allow_fall_back_on_numpy") class TestCorrelateShapeCombination(unittest.TestCase): + @testing.for_all_dtypes(no_float16=True) @testing.numpy_cupy_allclose(rtol=1e-4) def test_correlate(self, xp, dtype): @@ -161,34 +170,36 @@ def test_correlate(self, xp, dtype): return xp.correlate(a, b, mode=self.mode) -@testing.parameterize(*testing.product({"mode": ["valid", "full", "same"]})) +@pytest.mark.parametrize("mode", ["valid", "full", "same"]) @pytest.mark.usefixtures("allow_fall_back_on_numpy") -class TestCorrelate(unittest.TestCase): +class TestCorrelate: + @testing.for_all_dtypes() @testing.numpy_cupy_allclose(rtol=1e-5) - def test_correlate_non_contiguous(self, xp, dtype): + def test_correlate_non_contiguous(self, xp, dtype, mode): a = testing.shaped_arange((300,), xp, dtype) b = testing.shaped_arange((100,), xp, dtype) - return xp.correlate(a[::200], b[10::70], mode=self.mode) + return xp.correlate(a[::200], b[10::70], mode=mode) @testing.for_all_dtypes(no_float16=True) @testing.numpy_cupy_allclose(rtol=1e-4) - def test_correlate_large_non_contiguous(self, xp, dtype): + def test_correlate_large_non_contiguous(self, xp, dtype, mode): a = testing.shaped_arange((10000,), xp, dtype) b = testing.shaped_arange((1000,), xp, dtype) - return xp.correlate(a[200::], b[10::700], mode=self.mode) + return xp.correlate(a[200::], b[10::700], mode=mode) @testing.for_all_dtypes_combination(names=["dtype1", "dtype2"]) @testing.numpy_cupy_allclose(rtol=1e-2, type_check=has_support_aspect64()) - def test_correlate_diff_types(self, xp, dtype1, dtype2): + def test_correlate_diff_types(self, xp, dtype1, dtype2, mode): a = testing.shaped_random((200,), xp, dtype1) b = testing.shaped_random((100,), xp, dtype2) - return xp.correlate(a, b, mode=self.mode) + return xp.correlate(a, b, mode=mode) @testing.parameterize(*testing.product({"mode": ["valid", "same", "full"]})) @pytest.mark.usefixtures("allow_fall_back_on_numpy") class TestCorrelateInvalid(unittest.TestCase): + @testing.with_requires("numpy>=1.18") @testing.for_all_dtypes() def test_correlate_empty(self, dtype): From c2d281546be42058fcd7eeb252c0940675d9ed14 Mon Sep 17 00:00:00 2001 From: Anton Volkov Date: Tue, 5 Nov 2024 15:02:56 +0100 Subject: [PATCH 11/29] Add new tests to scope of public CI --- .github/workflows/conda-package.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/conda-package.yml b/.github/workflows/conda-package.yml index 5bb9aeb51db..6070cb54785 100644 --- a/.github/workflows/conda-package.yml +++ b/.github/workflows/conda-package.yml @@ -63,10 +63,11 @@ env: third_party/cupy/math_tests third_party/cupy/padding_tests third_party/cupy/sorting_tests - third_party/cupy/statistics_tests/test_histogram.py - third_party/cupy/statistics_tests/test_meanvar.py + third_party/cupy/statistics_tests third_party/cupy/test_ndim.py + third_party/cupy/test_numpy_interop.py third_party/cupy/test_type_routines.py + third_party/cupy/test_typing.py VER_JSON_NAME: 'version.json' VER_SCRIPT1: "import json; f = open('version.json', 'r'); j = json.load(f); f.close(); " VER_SCRIPT2: "d = j['dpnp'][0]; print('='.join((d[s] for s in ('version', 'build'))))" From 3a8cb9b0d97aac7d4e08213db0a2d184da166f39 Mon Sep 17 00:00:00 2001 From: Anton Volkov Date: Tue, 5 Nov 2024 
16:06:28 +0100 Subject: [PATCH 12/29] Fix issue with fp64 in test_numpy_interop.py::test_asnumpy --- tests/third_party/cupy/test_numpy_interop.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/third_party/cupy/test_numpy_interop.py b/tests/third_party/cupy/test_numpy_interop.py index 2999cf84092..eab59d506fd 100644 --- a/tests/third_party/cupy/test_numpy_interop.py +++ b/tests/third_party/cupy/test_numpy_interop.py @@ -145,7 +145,7 @@ def test_lt(self): class TestAsnumpy: def test_asnumpy(self): - x = testing.shaped_random((2, 3, 4), cupy, cupy.float64) + x = testing.shaped_random((2, 3, 4), cupy, cupy.default_float_type()) y = cupy.asnumpy(x) testing.assert_array_equal(x, y) From de1c46d07b3e58f536bb337d75ef04e7c4dbeac8 Mon Sep 17 00:00:00 2001 From: Anton Volkov Date: Tue, 5 Nov 2024 18:27:40 +0100 Subject: [PATCH 13/29] dpnp.exceptions is not implemented --- tests/third_party/cupy/test_init.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/third_party/cupy/test_init.py b/tests/third_party/cupy/test_init.py index 97be323a4a3..711991c39dc 100644 --- a/tests/third_party/cupy/test_init.py +++ b/tests/third_party/cupy/test_init.py @@ -145,6 +145,7 @@ def test_bitwise_not_is_invert(self): assert xp.bitwise_not is xp.invert +@pytest.mark.skip("dpnp.exceptions is not implemented") @testing.with_requires("numpy>=2.0") @pytest.mark.parametrize( "name", From b7b73f7a362fb75055545b223ca690996bdd6cb4 Mon Sep 17 00:00:00 2001 From: Anton Volkov Date: Thu, 14 Nov 2024 16:10:20 +0100 Subject: [PATCH 14/29] Enable TestCorrcoef scope --- .../cupy/statistics_tests/test_correlation.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/tests/third_party/cupy/statistics_tests/test_correlation.py b/tests/third_party/cupy/statistics_tests/test_correlation.py index e19eee27ea8..05a791aff55 100644 --- a/tests/third_party/cupy/statistics_tests/test_correlation.py +++ b/tests/third_party/cupy/statistics_tests/test_correlation.py @@ -8,7 +8,6 @@ from tests.third_party.cupy import testing -@pytest.mark.skip("dpnp.corrcoef() is not implemented yet") class TestCorrcoef(unittest.TestCase): @testing.for_all_dtypes() @@ -43,7 +42,13 @@ def test_corrcoef_rowvar(self, xp, dtype): def test_corrcoef_dtype(self, xp, dtype): a = testing.shaped_arange((2, 3), xp, dtype) y = testing.shaped_arange((2, 3), xp, dtype) - return xp.corrcoef(a, y=y, dtype=dtype) + try: + res = xp.corrcoef(a, y=y, dtype=dtype) + except ValueError as e: + if xp is cupy: # dpnp raises ValueError(...) 
+ raise TypeError(e) + raise + return res class TestCov(unittest.TestCase): From 9c887a91af8b1c5d00028fb78b16222169499fe3 Mon Sep 17 00:00:00 2001 From: Anton Volkov Date: Thu, 14 Nov 2024 16:24:08 +0100 Subject: [PATCH 15/29] Update sorting_tests/test_count.py --- tests/third_party/cupy/sorting_tests/test_count.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/tests/third_party/cupy/sorting_tests/test_count.py b/tests/third_party/cupy/sorting_tests/test_count.py index 117f3be94f6..193142f2b2e 100644 --- a/tests/third_party/cupy/sorting_tests/test_count.py +++ b/tests/third_party/cupy/sorting_tests/test_count.py @@ -7,6 +7,7 @@ class TestCount(unittest.TestCase): + @testing.for_all_dtypes() def test_count_nonzero(self, dtype): def func(xp): @@ -17,7 +18,7 @@ def func(xp): # CuPy returns zero-dimensional array instead of # returning a scalar value assert isinstance(c, xp.ndarray) - assert c.dtype == "p" + assert c.dtype == "l" assert c.shape == () return int(c) @@ -32,7 +33,7 @@ def func(xp): # CuPy returns zero-dimensional array instead of # returning a scalar value assert isinstance(c, xp.ndarray) - assert c.dtype == "p" + assert c.dtype == "l" assert c.shape == () return int(c) From 4e7b432425432bf89fc3e8dd61bec469e3a48188 Mon Sep 17 00:00:00 2001 From: Anton Volkov Date: Thu, 14 Nov 2024 16:32:42 +0100 Subject: [PATCH 16/29] Update sorting_tests/test_search.py --- .../cupy/sorting_tests/test_search.py | 56 ++++++++++++++----- 1 file changed, 43 insertions(+), 13 deletions(-) diff --git a/tests/third_party/cupy/sorting_tests/test_search.py b/tests/third_party/cupy/sorting_tests/test_search.py index 9b34ba14312..03c57e6d864 100644 --- a/tests/third_party/cupy/sorting_tests/test_search.py +++ b/tests/third_party/cupy/sorting_tests/test_search.py @@ -7,6 +7,7 @@ class TestSearch: + @testing.for_all_dtypes(no_complex=True) @testing.numpy_cupy_allclose() def test_argmax_all(self, xp, dtype): @@ -167,6 +168,13 @@ def test_argmin_int32_overflow(self): assert a.argmin().item() == 2**32 +# TODO(leofang): remove this once CUDA 9.0 is dropped +def _skip_cuda90(dtype): + ver = cupy.cuda.runtime.runtimeGetVersion() + if dtype == cupy.float16 and ver == 9000: + pytest.skip("CUB does not support fp16 on CUDA 9.0") + + # This class compares CUB results against NumPy's # TODO(leofang): test axis after support is added @testing.parameterize( @@ -180,6 +188,7 @@ def test_argmin_int32_overflow(self): ) @pytest.mark.skip("The CUB routine is not enabled") class TestCubReduction: + @pytest.fixture(autouse=True) def setUp(self): self.order, self.axis = self.order_and_axis @@ -200,6 +209,7 @@ def setUp(self): @testing.for_dtypes("bhilBHILefdFD") @testing.numpy_cupy_allclose(rtol=1e-5, contiguous_check=False) def test_cub_argmin(self, xp, dtype): + _skip_cuda90(dtype) a = testing.shaped_random(self.shape, xp, dtype) if self.order == "C": a = xp.ascontiguousarray(a) @@ -220,7 +230,7 @@ def test_cub_argmin(self, xp, dtype): # this is the only function we can mock; the rest is cdef'd func_name = "cupy._core._cub_reduction." 
func_name += "_SimpleCubReductionKernel_get_cached_function" - # func = _cub_reduction._SimpleCubReductionKernel_get_cached_function + func = _cub_reduction._SimpleCubReductionKernel_get_cached_function if self.axis is not None and len(self.shape) > 1: times_called = 1 # one pass else: @@ -235,7 +245,7 @@ def test_cub_argmin(self, xp, dtype): @testing.for_dtypes("bhilBHILefdFD") @testing.numpy_cupy_allclose(rtol=1e-5, contiguous_check=False) def test_cub_argmax(self, xp, dtype): - # _skip_cuda90(dtype) + _skip_cuda90(dtype) a = testing.shaped_random(self.shape, xp, dtype) if self.order == "C": a = xp.ascontiguousarray(a) @@ -256,7 +266,7 @@ def test_cub_argmax(self, xp, dtype): # this is the only function we can mock; the rest is cdef'd func_name = "cupy._core._cub_reduction." func_name += "_SimpleCubReductionKernel_get_cached_function" - # func = _cub_reduction._SimpleCubReductionKernel_get_cached_function + func = _cub_reduction._SimpleCubReductionKernel_get_cached_function if self.axis is not None and len(self.shape) > 1: times_called = 1 # one pass else: @@ -280,6 +290,7 @@ def test_cub_argmax(self, xp, dtype): ) @pytest.mark.skip("dtype is not supported") class TestArgMinMaxDtype: + @testing.for_dtypes( dtypes=[numpy.int8, numpy.int16, numpy.int32, numpy.int64], name="result_dtype", @@ -304,6 +315,7 @@ def test_argminmax_dtype(self, in_dtype, result_dtype): {"cond_shape": (3, 4), "x_shape": (2, 3, 4), "y_shape": (4,)}, ) class TestWhereTwoArrays: + @testing.for_all_dtypes_combination(names=["cond_type", "x_type", "y_type"]) @testing.numpy_cupy_allclose(type_check=has_support_aspect64()) def test_where_two_arrays(self, xp, cond_type, x_type, y_type): @@ -323,6 +335,7 @@ def test_where_two_arrays(self, xp, cond_type, x_type, y_type): {"cond_shape": (3, 4)}, ) class TestWhereCond: + @testing.for_all_dtypes() @testing.numpy_cupy_array_equal() def test_where_cond(self, xp, dtype): @@ -332,6 +345,7 @@ def test_where_cond(self, xp, dtype): class TestWhereError: + def test_one_argument(self): for xp in (numpy, cupy): cond = testing.shaped_random((3, 4), xp, dtype=xp.bool_) @@ -349,6 +363,7 @@ def test_one_argument(self): _ids=False, # Do not generate ids from randomly generated params ) class TestNonzero: + @testing.for_all_dtypes() @testing.numpy_cupy_array_equal() def test_nonzero(self, xp, dtype): @@ -360,15 +375,21 @@ def test_nonzero(self, xp, dtype): {"array": numpy.array(0)}, {"array": numpy.array(1)}, ) -@pytest.mark.skip("Only positive rank is supported") @testing.with_requires("numpy>=1.17.0") class TestNonzeroZeroDimension: + + @testing.with_requires("numpy>=2.1") + @testing.for_all_dtypes() + def test_nonzero(self, dtype): + array = cupy.array(self.array, dtype=dtype) + with pytest.raises(ValueError): + cupy.nonzero(array) + @testing.for_all_dtypes() @testing.numpy_cupy_array_equal() - def test_nonzero(self, xp, dtype): + def test_nonzero_explicit(self, xp, dtype): array = xp.array(self.array, dtype=dtype) - with testing.assert_warns(DeprecationWarning): - return xp.nonzero(array) + return xp.nonzero(xp.atleast_1d(array)) @testing.parameterize( @@ -382,6 +403,7 @@ def test_nonzero(self, xp, dtype): _ids=False, # Do not generate ids from randomly generated params ) class TestFlatNonzero: + @testing.for_all_dtypes() @testing.numpy_cupy_array_equal() def test_flatnonzero(self, xp, dtype): @@ -398,6 +420,7 @@ def test_flatnonzero(self, xp, dtype): _ids=False, # Do not generate ids from randomly generated params ) class TestArgwhere: + @testing.for_all_dtypes() 
@testing.numpy_cupy_array_equal() def test_argwhere(self, xp, dtype): @@ -411,6 +434,7 @@ def test_argwhere(self, xp, dtype): ) @testing.with_requires("numpy>=1.18") class TestArgwhereZeroDimension: + @testing.for_all_dtypes() @testing.numpy_cupy_array_equal() def test_argwhere(self, xp, dtype): @@ -419,6 +443,7 @@ def test_argwhere(self, xp, dtype): class TestNanArgMin: + @testing.for_all_dtypes(no_complex=True) @testing.numpy_cupy_allclose() def test_nanargmin_all(self, xp, dtype): @@ -509,6 +534,7 @@ def test_nanargmin_zero_size_axis1(self, xp, dtype): class TestNanArgMax: + @testing.for_all_dtypes(no_complex=True) @testing.numpy_cupy_allclose() def test_nanargmax_all(self, xp, dtype): @@ -620,6 +646,7 @@ def test_nanargmax_zero_size_axis1(self, xp, dtype): ) ) class TestSearchSorted: + @testing.for_all_dtypes(no_bool=True) @testing.numpy_cupy_array_equal() def test_searchsorted(self, xp, dtype): @@ -639,6 +666,7 @@ def test_ndarray_searchsorted(self, xp, dtype): @testing.parameterize({"side": "left"}, {"side": "right"}) class TestSearchSortedNanInf: + @testing.numpy_cupy_array_equal() def test_searchsorted_nanbins(self, xp): x = testing.shaped_arange((10,), xp, xp.float64) @@ -704,6 +732,7 @@ def test_searchsorted_minf(self, xp): class TestSearchSortedInvalid: + # Can't test unordered bins due to numpy undefined # behavior for searchsorted @@ -723,6 +752,7 @@ def test_ndarray_searchsorted_ndbins(self): class TestSearchSortedWithSorter: + @testing.numpy_cupy_array_equal() def test_sorter(self, xp): x = testing.shaped_arange((12,), xp, xp.float64) @@ -741,16 +771,16 @@ def test_invalid_sorter(self): def test_nonint_sorter(self): for xp in (numpy, cupy): - dt = cupy.default_float_type() - x = testing.shaped_arange((12,), xp, dt) + x = testing.shaped_arange((12,), xp, xp.float32) bins = xp.array([10, 4, 2, 1, 8]) - sorter = xp.array([], dtype=dt) + sorter = xp.array([], dtype=xp.float32) with pytest.raises((TypeError, ValueError)): xp.searchsorted(bins, x, sorter=sorter) @testing.parameterize({"side": "left"}, {"side": "right"}) class TestNdarraySearchSortedNanInf: + @testing.numpy_cupy_array_equal() def test_searchsorted_nanbins(self, xp): x = testing.shaped_arange((10,), xp, xp.float64) @@ -816,6 +846,7 @@ def test_searchsorted_minf(self, xp): class TestNdarraySearchSortedWithSorter: + @testing.numpy_cupy_array_equal() def test_sorter(self, xp): x = testing.shaped_arange((12,), xp, xp.float64) @@ -834,9 +865,8 @@ def test_invalid_sorter(self): def test_nonint_sorter(self): for xp in (numpy, cupy): - dt = cupy.default_float_type() - x = testing.shaped_arange((12,), xp, dt) + x = testing.shaped_arange((12,), xp, xp.float32) bins = xp.array([10, 4, 2, 1, 8]) - sorter = xp.array([], dtype=dt) + sorter = xp.array([], dtype=xp.float32) with pytest.raises((TypeError, ValueError)): bins.searchsorted(x, sorter=sorter) From d2929567c1b03caed9d735ca3e93a4f72c719689 Mon Sep 17 00:00:00 2001 From: Anton Volkov Date: Thu, 14 Nov 2024 16:43:10 +0100 Subject: [PATCH 17/29] Update sorting_tests/test_sort.py --- .../cupy/sorting_tests/test_sort.py | 49 +++++++------------ 1 file changed, 17 insertions(+), 32 deletions(-) diff --git a/tests/third_party/cupy/sorting_tests/test_sort.py b/tests/third_party/cupy/sorting_tests/test_sort.py index 5a90a8df278..1be7ac0a592 100644 --- a/tests/third_party/cupy/sorting_tests/test_sort.py +++ b/tests/third_party/cupy/sorting_tests/test_sort.py @@ -20,6 +20,7 @@ def get_array_module(*args): class TestSort(unittest.TestCase): + # Test ranks def test_sort_zero_dim(self): 
@@ -68,11 +69,11 @@ def test_sort_contiguous(self, xp): a.sort() return a - @testing.numpy_cupy_array_equal() - def test_sort_non_contiguous(self, xp): - a = testing.shaped_random((10,), xp)[::2] # Non contiguous view - a.sort() - return a + @pytest.mark.skip("non-contiguous array is supported") + def test_sort_non_contiguous(self): + a = testing.shaped_random((10,), cupy)[::2] # Non contiguous view + with self.assertRaises(NotImplementedError): + a.sort() @testing.numpy_cupy_array_equal() def test_external_sort_contiguous(self, xp): @@ -214,6 +215,7 @@ def test_large(self, xp): @pytest.mark.skip("lexsort() is not implemented yet") class TestLexsort(unittest.TestCase): + # Test ranks # TODO(niboshi): Fix xfail @@ -298,12 +300,15 @@ def test_F_order(self, xp): ) ) class TestArgsort(unittest.TestCase): - def argsort(self, a, axis=-1, kind=None): + + def argsort(self, a, axis=-1): if self.external: + # Need to explicitly specify kind="stable" + # numpy uses "quicksort" as default xp = cupy.get_array_module(a) - return xp.argsort(a, axis=axis, kind=kind) + return xp.argsort(a, axis=axis, kind="stable") else: - return a.argsort(axis=axis, kind=kind) + return a.argsort(axis=axis, kind="stable") # Test base cases @@ -319,7 +324,7 @@ def test_argsort_zero_dim(self, xp, dtype): @testing.numpy_cupy_array_equal() def test_argsort_one_dim(self, xp, dtype): a = testing.shaped_random((10,), xp, dtype) - return self.argsort(a, axis=-1, kind="stable") + return self.argsort(a) @testing.for_all_dtypes() @testing.numpy_cupy_array_equal() @@ -414,30 +419,8 @@ def test_nan2(self, xp, dtype): return self.argsort(a) -@pytest.mark.skip("msort() is deprecated") -class TestMsort(unittest.TestCase): - # Test base cases - - def test_msort_zero_dim(self): - for xp in (numpy, cupy): - a = testing.shaped_random((), xp) - with pytest.raises(AxisError): - xp.msort(a) - - @testing.for_all_dtypes() - @testing.numpy_cupy_array_equal() - def test_msort_one_dim(self, xp, dtype): - a = testing.shaped_random((10,), xp, dtype) - return xp.msort(a) - - @testing.for_all_dtypes() - @testing.numpy_cupy_array_equal() - def test_msort_multi_dim(self, xp, dtype): - a = testing.shaped_random((2, 3), xp, dtype) - return xp.msort(a) - - class TestSort_complex(unittest.TestCase): + def test_sort_complex_zero_dim(self): for xp in (numpy, cupy): a = testing.shaped_random((), xp) @@ -474,6 +457,7 @@ def test_sort_complex_nan(self, xp, dtype): ) @pytest.mark.usefixtures("allow_fall_back_on_numpy") class TestPartition(unittest.TestCase): + def partition(self, a, kth, axis=-1): if self.external: xp = cupy.get_array_module(a) @@ -622,6 +606,7 @@ def test_partition_invalid_negative_axis2(self): ) @pytest.mark.skip("not fully supported yet") class TestArgpartition(unittest.TestCase): + def argpartition(self, a, kth, axis=-1): if self.external: xp = cupy.get_array_module(a) From 060fdf92a6750763b2050856b416819c803afa46 Mon Sep 17 00:00:00 2001 From: Anton Volkov Date: Thu, 14 Nov 2024 18:02:33 +0100 Subject: [PATCH 18/29] Keep test_count_nonzero unchanged to work on Windows --- tests/third_party/cupy/sorting_tests/test_count.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/third_party/cupy/sorting_tests/test_count.py b/tests/third_party/cupy/sorting_tests/test_count.py index 193142f2b2e..d55e29cef27 100644 --- a/tests/third_party/cupy/sorting_tests/test_count.py +++ b/tests/third_party/cupy/sorting_tests/test_count.py @@ -18,7 +18,7 @@ def func(xp): # CuPy returns zero-dimensional array instead of # returning a scalar 
value assert isinstance(c, xp.ndarray) - assert c.dtype == "l" + assert c.dtype == "p" assert c.shape == () return int(c) @@ -33,7 +33,7 @@ def func(xp): # CuPy returns zero-dimensional array instead of # returning a scalar value assert isinstance(c, xp.ndarray) - assert c.dtype == "l" + assert c.dtype == "p" assert c.shape == () return int(c) From cf4ee15863df25974c7b73d8cb97427cb00e192f Mon Sep 17 00:00:00 2001 From: Anton Volkov Date: Fri, 15 Nov 2024 13:03:15 +0100 Subject: [PATCH 19/29] Exclude random tests from files with skipped tests --- tests/skipped_tests.tbl | 176 --------------------------- tests/skipped_tests_gpu.tbl | 177 ---------------------------- tests/skipped_tests_gpu_no_fp64.tbl | 171 --------------------------- 3 files changed, 524 deletions(-) diff --git a/tests/skipped_tests.tbl b/tests/skipped_tests.tbl index 8dc250517be..86f971f32ce 100644 --- a/tests/skipped_tests.tbl +++ b/tests/skipped_tests.tbl @@ -133,179 +133,3 @@ tests/third_party/cupy/math_tests/test_misc.py::TestMisc::test_interp_inf_fx tests/third_party/cupy/math_tests/test_misc.py::TestMisc::test_interp_inf_x tests/third_party/cupy/math_tests/test_misc.py::TestMisc::test_interp_size1 tests/third_party/cupy/math_tests/test_misc.py::TestMisc::test_interp_inf_to_nan - -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsBeta_param_0_{a_shape=(), b_shape=(), shape=(4, 3, 2)}::test_beta -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsBeta_param_1_{a_shape=(), b_shape=(), shape=(3, 2)}::test_beta -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsBeta_param_2_{a_shape=(), b_shape=(3, 2), shape=(4, 3, 2)}::test_beta -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsBeta_param_3_{a_shape=(), b_shape=(3, 2), shape=(3, 2)}::test_beta -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsBeta_param_4_{a_shape=(3, 2), b_shape=(), shape=(4, 3, 2)}::test_beta -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsBeta_param_5_{a_shape=(3, 2), b_shape=(), shape=(3, 2)}::test_beta -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsExponential_param_0_{scale_shape=(), shape=(4, 3, 2)}::test_exponential -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsExponential_param_1_{scale_shape=(), shape=(3, 2)}::test_exponential -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsExponential_param_2_{scale_shape=(), shape=None}::test_exponential -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGeometric_param_0_{p_shape=(), shape=(4, 3, 2)}::test_geometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGeometric_param_1_{p_shape=(), shape=(3, 2)}::test_geometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGeometric_param_2_{p_shape=(3, 2), shape=(4, 3, 2)}::test_geometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGeometric_param_3_{p_shape=(3, 2), shape=(3, 2)}::test_geometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_0_{nbad_shape=(), ngood_shape=(), nsample_dtype=int32, nsample_shape=(), shape=(4, 3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_1_{nbad_shape=(), ngood_shape=(), nsample_dtype=int32, nsample_shape=(), shape=(3, 
2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_2_{nbad_shape=(), ngood_shape=(), nsample_dtype=int32, nsample_shape=(3, 2), shape=(4, 3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_3_{nbad_shape=(), ngood_shape=(), nsample_dtype=int32, nsample_shape=(3, 2), shape=(3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_4_{nbad_shape=(), ngood_shape=(), nsample_dtype=int64, nsample_shape=(), shape=(4, 3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_5_{nbad_shape=(), ngood_shape=(), nsample_dtype=int64, nsample_shape=(), shape=(3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_6_{nbad_shape=(), ngood_shape=(), nsample_dtype=int64, nsample_shape=(3, 2), shape=(4, 3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_7_{nbad_shape=(), ngood_shape=(), nsample_dtype=int64, nsample_shape=(3, 2), shape=(3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_8_{nbad_shape=(), ngood_shape=(3, 2), nsample_dtype=int32, nsample_shape=(), shape=(4, 3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_9_{nbad_shape=(), ngood_shape=(3, 2), nsample_dtype=int32, nsample_shape=(), shape=(3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_10_{nbad_shape=(), ngood_shape=(3, 2), nsample_dtype=int32, nsample_shape=(3, 2), shape=(4, 3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_11_{nbad_shape=(), ngood_shape=(3, 2), nsample_dtype=int32, nsample_shape=(3, 2), shape=(3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_12_{nbad_shape=(), ngood_shape=(3, 2), nsample_dtype=int64, nsample_shape=(), shape=(4, 3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_13_{nbad_shape=(), ngood_shape=(3, 2), nsample_dtype=int64, nsample_shape=(), shape=(3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_14_{nbad_shape=(), ngood_shape=(3, 2), nsample_dtype=int64, nsample_shape=(3, 2), shape=(4, 3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_15_{nbad_shape=(), ngood_shape=(3, 2), nsample_dtype=int64, nsample_shape=(3, 2), shape=(3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_16_{nbad_shape=(3, 2), ngood_shape=(), nsample_dtype=int32, nsample_shape=(), shape=(4, 3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_17_{nbad_shape=(3, 2), ngood_shape=(), nsample_dtype=int32, nsample_shape=(), shape=(3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_18_{nbad_shape=(3, 2), 
ngood_shape=(), nsample_dtype=int32, nsample_shape=(3, 2), shape=(4, 3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_19_{nbad_shape=(3, 2), ngood_shape=(), nsample_dtype=int32, nsample_shape=(3, 2), shape=(3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_20_{nbad_shape=(3, 2), ngood_shape=(), nsample_dtype=int64, nsample_shape=(), shape=(4, 3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_21_{nbad_shape=(3, 2), ngood_shape=(), nsample_dtype=int64, nsample_shape=(), shape=(3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_22_{nbad_shape=(3, 2), ngood_shape=(), nsample_dtype=int64, nsample_shape=(3, 2), shape=(4, 3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_23_{nbad_shape=(3, 2), ngood_shape=(), nsample_dtype=int64, nsample_shape=(3, 2), shape=(3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_24_{nbad_shape=(3, 2), ngood_shape=(3, 2), nsample_dtype=int32, nsample_shape=(), shape=(4, 3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_25_{nbad_shape=(3, 2), ngood_shape=(3, 2), nsample_dtype=int32, nsample_shape=(), shape=(3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_26_{nbad_shape=(3, 2), ngood_shape=(3, 2), nsample_dtype=int32, nsample_shape=(3, 2), shape=(4, 3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_27_{nbad_shape=(3, 2), ngood_shape=(3, 2), nsample_dtype=int32, nsample_shape=(3, 2), shape=(3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_28_{nbad_shape=(3, 2), ngood_shape=(3, 2), nsample_dtype=int64, nsample_shape=(), shape=(4, 3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_29_{nbad_shape=(3, 2), ngood_shape=(3, 2), nsample_dtype=int64, nsample_shape=(), shape=(3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_30_{nbad_shape=(3, 2), ngood_shape=(3, 2), nsample_dtype=int64, nsample_shape=(3, 2), shape=(4, 3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_31_{nbad_shape=(3, 2), ngood_shape=(3, 2), nsample_dtype=int64, nsample_shape=(3, 2), shape=(3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGumbel_param_0_{loc_shape=(), scale_shape=(), shape=(4, 3, 2)}::test_gumbel -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGumbel_param_1_{loc_shape=(), scale_shape=(), shape=(3, 2)}::test_gumbel -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGumbel_param_2_{loc_shape=(), scale_shape=(3, 2), shape=(4, 3, 2)}::test_gumbel -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGumbel_param_3_{loc_shape=(), scale_shape=(3, 2), shape=(3, 2)}::test_gumbel 
-tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGumbel_param_4_{loc_shape=(3, 2), scale_shape=(), shape=(4, 3, 2)}::test_gumbel -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGumbel_param_5_{loc_shape=(3, 2), scale_shape=(), shape=(3, 2)}::test_gumbel -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsLogistic_param_0_{loc_shape=(), scale_shape=(), shape=(4, 3, 2)}::test_logistic -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsLogistic_param_1_{loc_shape=(), scale_shape=(), shape=(3, 2)}::test_logistic -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsLogistic_param_2_{loc_shape=(), scale_shape=(3, 2), shape=(4, 3, 2)}::test_logistic -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsLogistic_param_3_{loc_shape=(), scale_shape=(3, 2), shape=(3, 2)}::test_logistic -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsLogistic_param_4_{loc_shape=(3, 2), scale_shape=(), shape=(4, 3, 2)}::test_logistic -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsLogistic_param_5_{loc_shape=(3, 2), scale_shape=(), shape=(3, 2)}::test_logistic -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsLognormal_param_0_{mean_shape=(), shape=(4, 3, 2), sigma_shape=()}::test_lognormal -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsLognormal_param_1_{mean_shape=(), shape=(3, 2), sigma_shape=()}::test_lognormal -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsLogseries_param_0_{p_shape=(), shape=(4, 3, 2)}::test_logseries -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsLogseries_param_0_{p_shape=(), shape=(4, 3, 2)}::test_logseries_for_invalid_p -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsLogseries_param_1_{p_shape=(), shape=(3, 2)}::test_logseries -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsLogseries_param_1_{p_shape=(), shape=(3, 2)}::test_logseries_for_invalid_p -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsMultivariateNormal_param_0_{d=2, shape=(4, 3, 2)}::test_normal -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsMultivariateNormal_param_1_{d=2, shape=(3, 2)}::test_normal -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsMultivariateNormal_param_2_{d=4, shape=(4, 3, 2)}::test_normal -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsMultivariateNormal_param_3_{d=4, shape=(3, 2)}::test_normal -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNegativeBinomial_param_0_{n_shape=(), p_shape=(), shape=(4, 3, 2)}::test_negative_binomial -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNegativeBinomial_param_0_{n_shape=(), p_shape=(), shape=(4, 3, 2)}::test_negative_binomial_for_noninteger_n -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNegativeBinomial_param_1_{n_shape=(), p_shape=(), shape=(3, 2)}::test_negative_binomial -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNegativeBinomial_param_1_{n_shape=(), p_shape=(), shape=(3, 2)}::test_negative_binomial_for_noninteger_n -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNegativeBinomial_param_2_{n_shape=(), p_shape=(3, 2), 
shape=(4, 3, 2)}::test_negative_binomial -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNegativeBinomial_param_2_{n_shape=(), p_shape=(3, 2), shape=(4, 3, 2)}::test_negative_binomial_for_noninteger_n -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNegativeBinomial_param_3_{n_shape=(), p_shape=(3, 2), shape=(3, 2)}::test_negative_binomial -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNegativeBinomial_param_3_{n_shape=(), p_shape=(3, 2), shape=(3, 2)}::test_negative_binomial_for_noninteger_n -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNegativeBinomial_param_4_{n_shape=(3, 2), p_shape=(), shape=(4, 3, 2)}::test_negative_binomial -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNegativeBinomial_param_4_{n_shape=(3, 2), p_shape=(), shape=(4, 3, 2)}::test_negative_binomial_for_noninteger_n -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNegativeBinomial_param_5_{n_shape=(3, 2), p_shape=(), shape=(3, 2)}::test_negative_binomial -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNegativeBinomial_param_5_{n_shape=(3, 2), p_shape=(), shape=(3, 2)}::test_negative_binomial_for_noninteger_n -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralChisquare_param_0_{df_shape=(), nonc_shape=(), shape=(4, 3, 2)}::test_noncentral_chisquare -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralChisquare_param_1_{df_shape=(), nonc_shape=(), shape=(3, 2)}::test_noncentral_chisquare -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralChisquare_param_2_{df_shape=(), nonc_shape=(3, 2), shape=(4, 3, 2)}::test_noncentral_chisquare -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralChisquare_param_3_{df_shape=(), nonc_shape=(3, 2), shape=(3, 2)}::test_noncentral_chisquare -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralChisquare_param_4_{df_shape=(3, 2), nonc_shape=(), shape=(4, 3, 2)}::test_noncentral_chisquare -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralChisquare_param_5_{df_shape=(3, 2), nonc_shape=(), shape=(3, 2)}::test_noncentral_chisquare -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralF_param_0_{dfden_shape=(), dfnum_shape=(), nonc_shape=(), shape=(4, 3, 2)}::test_noncentral_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralF_param_10_{dfden_shape=(3, 2), dfnum_shape=(), nonc_shape=(3, 2), shape=(4, 3, 2)}::test_noncentral_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralF_param_11_{dfden_shape=(3, 2), dfnum_shape=(), nonc_shape=(3, 2), shape=(3, 2)}::test_noncentral_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralF_param_12_{dfden_shape=(3, 2), dfnum_shape=(3, 2), nonc_shape=(), shape=(4, 3, 2)}::test_noncentral_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralF_param_13_{dfden_shape=(3, 2), dfnum_shape=(3, 2), nonc_shape=(), shape=(3, 2)}::test_noncentral_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralF_param_1_{dfden_shape=(), dfnum_shape=(), nonc_shape=(), shape=(3, 2)}::test_noncentral_f 
-tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralF_param_2_{dfden_shape=(), dfnum_shape=(), nonc_shape=(3, 2), shape=(4, 3, 2)}::test_noncentral_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralF_param_3_{dfden_shape=(), dfnum_shape=(), nonc_shape=(3, 2), shape=(3, 2)}::test_noncentral_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralF_param_4_{dfden_shape=(), dfnum_shape=(3, 2), nonc_shape=(), shape=(4, 3, 2)}::test_noncentral_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralF_param_5_{dfden_shape=(), dfnum_shape=(3, 2), nonc_shape=(), shape=(3, 2)}::test_noncentral_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralF_param_6_{dfden_shape=(), dfnum_shape=(3, 2), nonc_shape=(3, 2), shape=(4, 3, 2)}::test_noncentral_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralF_param_7_{dfden_shape=(), dfnum_shape=(3, 2), nonc_shape=(3, 2), shape=(3, 2)}::test_noncentral_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralF_param_8_{dfden_shape=(3, 2), dfnum_shape=(), nonc_shape=(), shape=(4, 3, 2)}::test_noncentral_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralF_param_9_{dfden_shape=(3, 2), dfnum_shape=(), nonc_shape=(), shape=(3, 2)}::test_noncentral_f - -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsPoisson_param_0_{lam_shape=(), shape=(4, 3, 2)}::test_poisson -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsPoisson_param_1_{lam_shape=(), shape=(3, 2)}::test_poisson -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsPoisson_param_2_{lam_shape=(3, 2), shape=(4, 3, 2)}::test_poisson -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsPoisson_param_3_{lam_shape=(3, 2), shape=(3, 2)}::test_poisson -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsPower_param_0_{a_shape=(), shape=(4, 3, 2)}::test_power -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsPower_param_0_{a_shape=(), shape=(4, 3, 2)}::test_power_for_negative_a -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsPower_param_1_{a_shape=(), shape=(3, 2)}::test_power -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsPower_param_1_{a_shape=(), shape=(3, 2)}::test_power_for_negative_a -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsRayleigh_param_0_{scale_shape=(), shape=(4, 3, 2)}::test_rayleigh -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsRayleigh_param_0_{scale_shape=(), shape=(4, 3, 2)}::test_rayleigh_for_negative_scale -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsRayleigh_param_0_{scale_shape=(), shape=(4, 3, 2)}::test_rayleigh_for_zero_scale -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsRayleigh_param_1_{scale_shape=(), shape=(3, 2)}::test_rayleigh -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsRayleigh_param_1_{scale_shape=(), shape=(3, 2)}::test_rayleigh_for_negative_scale -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsRayleigh_param_1_{scale_shape=(), shape=(3, 2)}::test_rayleigh_for_zero_scale 
-tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsStandardGamma_param_0_{shape=(4, 3, 2), shape_shape=()}::test_standard_gamma -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsStandardGamma_param_2_{shape=(3, 2), shape_shape=()}::test_standard_gamma -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsTriangular_param_0_{left_shape=(), mode_shape=(), right_shape=(), shape=(4, 3, 2)}::test_triangular -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsTriangular_param_10_{left_shape=(3, 2), mode_shape=(), right_shape=(3, 2), shape=(4, 3, 2)}::test_triangular -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsTriangular_param_11_{left_shape=(3, 2), mode_shape=(), right_shape=(3, 2), shape=(3, 2)}::test_triangular -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsTriangular_param_12_{left_shape=(3, 2), mode_shape=(3, 2), right_shape=(), shape=(4, 3, 2)}::test_triangular -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsTriangular_param_13_{left_shape=(3, 2), mode_shape=(3, 2), right_shape=(), shape=(3, 2)}::test_triangular -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsTriangular_param_1_{left_shape=(), mode_shape=(), right_shape=(), shape=(3, 2)}::test_triangular -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsTriangular_param_2_{left_shape=(), mode_shape=(), right_shape=(3, 2), shape=(4, 3, 2)}::test_triangular -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsTriangular_param_3_{left_shape=(), mode_shape=(), right_shape=(3, 2), shape=(3, 2)}::test_triangular -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsTriangular_param_4_{left_shape=(), mode_shape=(3, 2), right_shape=(), shape=(4, 3, 2)}::test_triangular -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsTriangular_param_5_{left_shape=(), mode_shape=(3, 2), right_shape=(), shape=(3, 2)}::test_triangular -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsTriangular_param_6_{left_shape=(), mode_shape=(3, 2), right_shape=(3, 2), shape=(4, 3, 2)}::test_triangular -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsTriangular_param_7_{left_shape=(), mode_shape=(3, 2), right_shape=(3, 2), shape=(3, 2)}::test_triangular -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsTriangular_param_8_{left_shape=(3, 2), mode_shape=(), right_shape=(), shape=(4, 3, 2)}::test_triangular -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsTriangular_param_9_{left_shape=(3, 2), mode_shape=(), right_shape=(), shape=(3, 2)}::test_triangular -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsuLaplace_param_0_{loc_shape=(), scale_shape=(), shape=(4, 3, 2)}::test_laplace -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsuLaplace_param_1_{loc_shape=(), scale_shape=(), shape=(3, 2)}::test_laplace -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsuLaplace_param_2_{loc_shape=(), scale_shape=(3, 2), shape=(4, 3, 2)}::test_laplace -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsuLaplace_param_3_{loc_shape=(), scale_shape=(3, 2), shape=(3, 2)}::test_laplace 
-tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsuLaplace_param_4_{loc_shape=(3, 2), scale_shape=(), shape=(4, 3, 2)}::test_laplace -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsuLaplace_param_5_{loc_shape=(3, 2), scale_shape=(), shape=(3, 2)}::test_laplace -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsUniform_param_0_{high_shape=(), low_shape=(), shape=(4, 3, 2)}::test_uniform -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsUniform_param_1_{high_shape=(), low_shape=(), shape=(3, 2)}::test_uniform -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsUniform_param_4_{high_shape=(3, 2), low_shape=(), shape=(4, 3, 2)}::test_uniform -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsUniform_param_5_{high_shape=(3, 2), low_shape=(), shape=(3, 2)}::test_uniform -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWald_param_0_{mean_shape=(), scale_shape=(), shape=(4, 3, 2)}::test_wald -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWald_param_1_{mean_shape=(), scale_shape=(), shape=(3, 2)}::test_wald -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWald_param_2_{mean_shape=(), scale_shape=(3, 2), shape=(4, 3, 2)}::test_wald -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWald_param_3_{mean_shape=(), scale_shape=(3, 2), shape=(3, 2)}::test_wald -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWald_param_4_{mean_shape=(3, 2), scale_shape=(), shape=(4, 3, 2)}::test_wald -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWald_param_5_{mean_shape=(3, 2), scale_shape=(), shape=(3, 2)}::test_wald -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWeibull_param_0_{a_shape=(), shape=(4, 3, 2)}::test_weibull -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWeibull_param_0_{a_shape=(), shape=(4, 3, 2)}::test_weibull_for_inf_a -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWeibull_param_0_{a_shape=(), shape=(4, 3, 2)}::test_weibull_for_negative_a -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWeibull_param_1_{a_shape=(), shape=(3, 2)}::test_weibull -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWeibull_param_1_{a_shape=(), shape=(3, 2)}::test_weibull_for_inf_a -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWeibull_param_1_{a_shape=(), shape=(3, 2)}::test_weibull_for_negative_a -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsZipf_param_0_{a_shape=(), shape=(4, 3, 2)}::test_zipf -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsZipf_param_1_{a_shape=(), shape=(3, 2)}::test_zipf - -tests/third_party/cupy/random_tests/test_sample.py::TestChoice::test_no_none -tests/third_party/cupy/random_tests/test_sample.py::TestChoice::test_p_is_none -tests/third_party/cupy/random_tests/test_sample.py::TestChoice::test_replace_and_p_are_none -tests/third_party/cupy/random_tests/test_sample.py::TestChoice::test_replace_is_none -tests/third_party/cupy/random_tests/test_sample.py::TestChoice::test_size_and_p_are_none -tests/third_party/cupy/random_tests/test_sample.py::TestChoice::test_size_and_replace_and_p_are_none 
-tests/third_party/cupy/random_tests/test_sample.py::TestChoice::test_size_and_replace_are_none -tests/third_party/cupy/random_tests/test_sample.py::TestChoice::test_size_is_none -tests/third_party/cupy/random_tests/test_sample.py::TestMultinomial_param_0_{size=None}::test_multinomial -tests/third_party/cupy/random_tests/test_sample.py::TestMultinomial_param_1_{size=()}::test_multinomial -tests/third_party/cupy/random_tests/test_sample.py::TestMultinomial_param_2_{size=4}::test_multinomial -tests/third_party/cupy/random_tests/test_sample.py::TestMultinomial_param_3_{size=(0,)}::test_multinomial -tests/third_party/cupy/random_tests/test_sample.py::TestMultinomial_param_4_{size=(1, 0)}::test_multinomial -tests/third_party/cupy/random_tests/test_sample.py::TestRandint2::test_bound_float1 -tests/third_party/cupy/random_tests/test_sample.py::TestRandint2::test_goodness_of_fit -tests/third_party/cupy/random_tests/test_sample.py::TestRandint2::test_goodness_of_fit_2 -tests/third_party/cupy/random_tests/test_sample.py::TestRandomIntegers2::test_bound_1 -tests/third_party/cupy/random_tests/test_sample.py::TestRandomIntegers2::test_bound_2 -tests/third_party/cupy/random_tests/test_sample.py::TestRandomIntegers2::test_goodness_of_fit -tests/third_party/cupy/random_tests/test_sample.py::TestRandomIntegers2::test_goodness_of_fit_2 diff --git a/tests/skipped_tests_gpu.tbl b/tests/skipped_tests_gpu.tbl index 233c6b568f8..2dbca0925f2 100644 --- a/tests/skipped_tests_gpu.tbl +++ b/tests/skipped_tests_gpu.tbl @@ -25,56 +25,12 @@ tests/test_umath.py::test_umaths[('floor_divide', 'ff')] tests/test_umath.py::test_umaths[('frexp', 'f')] tests/test_umath.py::test_umaths[('frexp', 'd')] -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGeometric_param_2_{p_shape=(3, 2), shape=(4, 3, 2)}::test_geometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGeometric_param_3_{p_shape=(3, 2), shape=(3, 2)}::test_geometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_0_{nbad_shape=(), ngood_shape=(), nsample_dtype=int32, nsample_shape=(), shape=(4, 3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_1_{nbad_shape=(), ngood_shape=(), nsample_dtype=int32, nsample_shape=(), shape=(3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_2_{nbad_shape=(), ngood_shape=(), nsample_dtype=int32, nsample_shape=(3, 2), shape=(4, 3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_3_{nbad_shape=(), ngood_shape=(), nsample_dtype=int32, nsample_shape=(3, 2), shape=(3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_4_{nbad_shape=(), ngood_shape=(), nsample_dtype=int64, nsample_shape=(), shape=(4, 3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_5_{nbad_shape=(), ngood_shape=(), nsample_dtype=int64, nsample_shape=(), shape=(3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_6_{nbad_shape=(), ngood_shape=(), nsample_dtype=int64, nsample_shape=(3, 2), shape=(4, 3, 2)}::test_hypergeometric 
-tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_7_{nbad_shape=(), ngood_shape=(), nsample_dtype=int64, nsample_shape=(3, 2), shape=(3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_8_{nbad_shape=(), ngood_shape=(3, 2), nsample_dtype=int32, nsample_shape=(), shape=(4, 3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_9_{nbad_shape=(), ngood_shape=(3, 2), nsample_dtype=int32, nsample_shape=(), shape=(3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_10_{nbad_shape=(), ngood_shape=(3, 2), nsample_dtype=int32, nsample_shape=(3, 2), shape=(4, 3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_11_{nbad_shape=(), ngood_shape=(3, 2), nsample_dtype=int32, nsample_shape=(3, 2), shape=(3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_12_{nbad_shape=(), ngood_shape=(3, 2), nsample_dtype=int64, nsample_shape=(), shape=(4, 3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_13_{nbad_shape=(), ngood_shape=(3, 2), nsample_dtype=int64, nsample_shape=(), shape=(3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_14_{nbad_shape=(), ngood_shape=(3, 2), nsample_dtype=int64, nsample_shape=(3, 2), shape=(4, 3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_15_{nbad_shape=(), ngood_shape=(3, 2), nsample_dtype=int64, nsample_shape=(3, 2), shape=(3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_16_{nbad_shape=(3, 2), ngood_shape=(), nsample_dtype=int32, nsample_shape=(), shape=(4, 3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_17_{nbad_shape=(3, 2), ngood_shape=(), nsample_dtype=int32, nsample_shape=(), shape=(3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_18_{nbad_shape=(3, 2), ngood_shape=(), nsample_dtype=int32, nsample_shape=(3, 2), shape=(4, 3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_19_{nbad_shape=(3, 2), ngood_shape=(), nsample_dtype=int32, nsample_shape=(3, 2), shape=(3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_20_{nbad_shape=(3, 2), ngood_shape=(), nsample_dtype=int64, nsample_shape=(), shape=(4, 3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_21_{nbad_shape=(3, 2), ngood_shape=(), nsample_dtype=int64, nsample_shape=(), shape=(3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_22_{nbad_shape=(3, 2), ngood_shape=(), nsample_dtype=int64, nsample_shape=(3, 2), shape=(4, 3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_23_{nbad_shape=(3, 2), 
ngood_shape=(), nsample_dtype=int64, nsample_shape=(3, 2), shape=(3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_24_{nbad_shape=(3, 2), ngood_shape=(3, 2), nsample_dtype=int32, nsample_shape=(), shape=(4, 3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_25_{nbad_shape=(3, 2), ngood_shape=(3, 2), nsample_dtype=int32, nsample_shape=(), shape=(3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_26_{nbad_shape=(3, 2), ngood_shape=(3, 2), nsample_dtype=int32, nsample_shape=(3, 2), shape=(4, 3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_27_{nbad_shape=(3, 2), ngood_shape=(3, 2), nsample_dtype=int32, nsample_shape=(3, 2), shape=(3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_28_{nbad_shape=(3, 2), ngood_shape=(3, 2), nsample_dtype=int64, nsample_shape=(), shape=(4, 3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_29_{nbad_shape=(3, 2), ngood_shape=(3, 2), nsample_dtype=int64, nsample_shape=(), shape=(3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_30_{nbad_shape=(3, 2), ngood_shape=(3, 2), nsample_dtype=int64, nsample_shape=(3, 2), shape=(4, 3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsHyperGeometric_param_31_{nbad_shape=(3, 2), ngood_shape=(3, 2), nsample_dtype=int64, nsample_shape=(3, 2), shape=(3, 2)}::test_hypergeometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsPoisson_param_0_{lam_shape=(), shape=(4, 3, 2)}::test_poisson -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsPoisson_param_1_{lam_shape=(), shape=(3, 2)}::test_poisson -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsPoisson_param_2_{lam_shape=(3, 2), shape=(4, 3, 2)}::test_poisson -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsPoisson_param_3_{lam_shape=(3, 2), shape=(3, 2)}::test_poisson - tests/third_party/cupy/core_tests/test_ndarray_conversion.py::TestNdarrayToBytes_param_0_{shape=()}::test_item tests/third_party/cupy/core_tests/test_ndarray_conversion.py::TestNdarrayToBytes_param_1_{shape=(1,)}::test_item tests/third_party/cupy/core_tests/test_ndarray_conversion.py::TestNdarrayToBytes_param_2_{shape=(2, 3)}::test_item tests/third_party/cupy/core_tests/test_ndarray_conversion.py::TestNdarrayToBytes_param_3_{order='C', shape=(2, 3)}::test_item tests/third_party/cupy/core_tests/test_ndarray_conversion.py::TestNdarrayToBytes_param_4_{order='F', shape=(2, 3)}::test_item -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsMultivariateNormal_param_0_{d=2, shape=(4, 3, 2)}::test_normal -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsMultivariateNormal_param_1_{d=2, shape=(3, 2)}::test_normal -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsMultivariateNormal_param_2_{d=4, shape=(4, 3, 2)}::test_normal -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsMultivariateNormal_param_3_{d=4, shape=(3, 2)}::test_normal - 
tests/third_party/intel/test_zero_copy_test1.py::test_dpnp_interaction_with_dpctl_memory tests/third_party/cupy/core_tests/test_ndarray_copy_and_view.py::TestArrayFlatten::test_flatten_order @@ -186,136 +142,3 @@ tests/third_party/cupy/math_tests/test_misc.py::TestMisc::test_interp_inf_fx tests/third_party/cupy/math_tests/test_misc.py::TestMisc::test_interp_inf_x tests/third_party/cupy/math_tests/test_misc.py::TestMisc::test_interp_size1 tests/third_party/cupy/math_tests/test_misc.py::TestMisc::test_interp_inf_to_nan - -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsBeta_param_0_{a_shape=(), b_shape=(), shape=(4, 3, 2)}::test_beta -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsBeta_param_1_{a_shape=(), b_shape=(), shape=(3, 2)}::test_beta -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsBeta_param_2_{a_shape=(), b_shape=(3, 2), shape=(4, 3, 2)}::test_beta -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsBeta_param_3_{a_shape=(), b_shape=(3, 2), shape=(3, 2)}::test_beta -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsBeta_param_4_{a_shape=(3, 2), b_shape=(), shape=(4, 3, 2)}::test_beta -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsBeta_param_5_{a_shape=(3, 2), b_shape=(), shape=(3, 2)}::test_beta -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsExponential_param_0_{scale_shape=(), shape=(4, 3, 2)}::test_exponential -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsExponential_param_1_{scale_shape=(), shape=(3, 2)}::test_exponential -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsExponential_param_2_{scale_shape=(), shape=None}::test_exponential -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGeometric_param_0_{p_shape=(), shape=(4, 3, 2)}::test_geometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGeometric_param_1_{p_shape=(), shape=(3, 2)}::test_geometric -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGumbel_param_0_{loc_shape=(), scale_shape=(), shape=(4, 3, 2)}::test_gumbel -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGumbel_param_1_{loc_shape=(), scale_shape=(), shape=(3, 2)}::test_gumbel -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGumbel_param_2_{loc_shape=(), scale_shape=(3, 2), shape=(4, 3, 2)}::test_gumbel -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGumbel_param_3_{loc_shape=(), scale_shape=(3, 2), shape=(3, 2)}::test_gumbel -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGumbel_param_4_{loc_shape=(3, 2), scale_shape=(), shape=(4, 3, 2)}::test_gumbel -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGumbel_param_5_{loc_shape=(3, 2), scale_shape=(), shape=(3, 2)}::test_gumbel -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsLogistic_param_0_{loc_shape=(), scale_shape=(), shape=(4, 3, 2)}::test_logistic -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsLogistic_param_1_{loc_shape=(), scale_shape=(), shape=(3, 2)}::test_logistic -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsLogistic_param_2_{loc_shape=(), scale_shape=(3, 2), shape=(4, 3, 2)}::test_logistic 
-tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsLogistic_param_3_{loc_shape=(), scale_shape=(3, 2), shape=(3, 2)}::test_logistic -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsLogistic_param_4_{loc_shape=(3, 2), scale_shape=(), shape=(4, 3, 2)}::test_logistic -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsLogistic_param_5_{loc_shape=(3, 2), scale_shape=(), shape=(3, 2)}::test_logistic -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsLognormal_param_0_{mean_shape=(), shape=(4, 3, 2), sigma_shape=()}::test_lognormal -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsLognormal_param_1_{mean_shape=(), shape=(3, 2), sigma_shape=()}::test_lognormal -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsLogseries_param_0_{p_shape=(), shape=(4, 3, 2)}::test_logseries -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsLogseries_param_0_{p_shape=(), shape=(4, 3, 2)}::test_logseries_for_invalid_p -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsLogseries_param_1_{p_shape=(), shape=(3, 2)}::test_logseries -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsLogseries_param_1_{p_shape=(), shape=(3, 2)}::test_logseries_for_invalid_p -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNegativeBinomial_param_0_{n_shape=(), p_shape=(), shape=(4, 3, 2)}::test_negative_binomial -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNegativeBinomial_param_0_{n_shape=(), p_shape=(), shape=(4, 3, 2)}::test_negative_binomial_for_noninteger_n -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNegativeBinomial_param_1_{n_shape=(), p_shape=(), shape=(3, 2)}::test_negative_binomial -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNegativeBinomial_param_1_{n_shape=(), p_shape=(), shape=(3, 2)}::test_negative_binomial_for_noninteger_n -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNegativeBinomial_param_2_{n_shape=(), p_shape=(3, 2), shape=(4, 3, 2)}::test_negative_binomial -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNegativeBinomial_param_2_{n_shape=(), p_shape=(3, 2), shape=(4, 3, 2)}::test_negative_binomial_for_noninteger_n -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNegativeBinomial_param_3_{n_shape=(), p_shape=(3, 2), shape=(3, 2)}::test_negative_binomial -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNegativeBinomial_param_3_{n_shape=(), p_shape=(3, 2), shape=(3, 2)}::test_negative_binomial_for_noninteger_n -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNegativeBinomial_param_4_{n_shape=(3, 2), p_shape=(), shape=(4, 3, 2)}::test_negative_binomial -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNegativeBinomial_param_4_{n_shape=(3, 2), p_shape=(), shape=(4, 3, 2)}::test_negative_binomial_for_noninteger_n -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNegativeBinomial_param_5_{n_shape=(3, 2), p_shape=(), shape=(3, 2)}::test_negative_binomial -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNegativeBinomial_param_5_{n_shape=(3, 2), p_shape=(), shape=(3, 2)}::test_negative_binomial_for_noninteger_n 
-tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralChisquare_param_0_{df_shape=(), nonc_shape=(), shape=(4, 3, 2)}::test_noncentral_chisquare -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralChisquare_param_1_{df_shape=(), nonc_shape=(), shape=(3, 2)}::test_noncentral_chisquare -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralChisquare_param_2_{df_shape=(), nonc_shape=(3, 2), shape=(4, 3, 2)}::test_noncentral_chisquare -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralChisquare_param_3_{df_shape=(), nonc_shape=(3, 2), shape=(3, 2)}::test_noncentral_chisquare -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralChisquare_param_4_{df_shape=(3, 2), nonc_shape=(), shape=(4, 3, 2)}::test_noncentral_chisquare -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralChisquare_param_5_{df_shape=(3, 2), nonc_shape=(), shape=(3, 2)}::test_noncentral_chisquare -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralF_param_0_{dfden_shape=(), dfnum_shape=(), nonc_shape=(), shape=(4, 3, 2)}::test_noncentral_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralF_param_10_{dfden_shape=(3, 2), dfnum_shape=(), nonc_shape=(3, 2), shape=(4, 3, 2)}::test_noncentral_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralF_param_11_{dfden_shape=(3, 2), dfnum_shape=(), nonc_shape=(3, 2), shape=(3, 2)}::test_noncentral_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralF_param_12_{dfden_shape=(3, 2), dfnum_shape=(3, 2), nonc_shape=(), shape=(4, 3, 2)}::test_noncentral_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralF_param_13_{dfden_shape=(3, 2), dfnum_shape=(3, 2), nonc_shape=(), shape=(3, 2)}::test_noncentral_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralF_param_1_{dfden_shape=(), dfnum_shape=(), nonc_shape=(), shape=(3, 2)}::test_noncentral_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralF_param_2_{dfden_shape=(), dfnum_shape=(), nonc_shape=(3, 2), shape=(4, 3, 2)}::test_noncentral_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralF_param_3_{dfden_shape=(), dfnum_shape=(), nonc_shape=(3, 2), shape=(3, 2)}::test_noncentral_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralF_param_4_{dfden_shape=(), dfnum_shape=(3, 2), nonc_shape=(), shape=(4, 3, 2)}::test_noncentral_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralF_param_5_{dfden_shape=(), dfnum_shape=(3, 2), nonc_shape=(), shape=(3, 2)}::test_noncentral_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralF_param_6_{dfden_shape=(), dfnum_shape=(3, 2), nonc_shape=(3, 2), shape=(4, 3, 2)}::test_noncentral_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralF_param_7_{dfden_shape=(), dfnum_shape=(3, 2), nonc_shape=(3, 2), shape=(3, 2)}::test_noncentral_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralF_param_8_{dfden_shape=(3, 2), dfnum_shape=(), nonc_shape=(), shape=(4, 3, 2)}::test_noncentral_f 
-tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralF_param_9_{dfden_shape=(3, 2), dfnum_shape=(), nonc_shape=(), shape=(3, 2)}::test_noncentral_f - -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsPower_param_0_{a_shape=(), shape=(4, 3, 2)}::test_power -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsPower_param_0_{a_shape=(), shape=(4, 3, 2)}::test_power_for_negative_a -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsPower_param_1_{a_shape=(), shape=(3, 2)}::test_power -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsPower_param_1_{a_shape=(), shape=(3, 2)}::test_power_for_negative_a -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsRayleigh_param_0_{scale_shape=(), shape=(4, 3, 2)}::test_rayleigh -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsRayleigh_param_0_{scale_shape=(), shape=(4, 3, 2)}::test_rayleigh_for_negative_scale -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsRayleigh_param_0_{scale_shape=(), shape=(4, 3, 2)}::test_rayleigh_for_zero_scale -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsRayleigh_param_1_{scale_shape=(), shape=(3, 2)}::test_rayleigh -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsRayleigh_param_1_{scale_shape=(), shape=(3, 2)}::test_rayleigh_for_negative_scale -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsRayleigh_param_1_{scale_shape=(), shape=(3, 2)}::test_rayleigh_for_zero_scale -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsStandardGamma_param_0_{shape=(4, 3, 2), shape_shape=()}::test_standard_gamma -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsStandardGamma_param_2_{shape=(3, 2), shape_shape=()}::test_standard_gamma -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsTriangular_param_0_{left_shape=(), mode_shape=(), right_shape=(), shape=(4, 3, 2)}::test_triangular -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsTriangular_param_10_{left_shape=(3, 2), mode_shape=(), right_shape=(3, 2), shape=(4, 3, 2)}::test_triangular -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsTriangular_param_11_{left_shape=(3, 2), mode_shape=(), right_shape=(3, 2), shape=(3, 2)}::test_triangular -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsTriangular_param_12_{left_shape=(3, 2), mode_shape=(3, 2), right_shape=(), shape=(4, 3, 2)}::test_triangular -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsTriangular_param_13_{left_shape=(3, 2), mode_shape=(3, 2), right_shape=(), shape=(3, 2)}::test_triangular -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsTriangular_param_1_{left_shape=(), mode_shape=(), right_shape=(), shape=(3, 2)}::test_triangular -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsTriangular_param_2_{left_shape=(), mode_shape=(), right_shape=(3, 2), shape=(4, 3, 2)}::test_triangular -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsTriangular_param_3_{left_shape=(), mode_shape=(), right_shape=(3, 2), shape=(3, 2)}::test_triangular -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsTriangular_param_4_{left_shape=(), mode_shape=(3, 2), 
right_shape=(), shape=(4, 3, 2)}::test_triangular -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsTriangular_param_5_{left_shape=(), mode_shape=(3, 2), right_shape=(), shape=(3, 2)}::test_triangular -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsTriangular_param_6_{left_shape=(), mode_shape=(3, 2), right_shape=(3, 2), shape=(4, 3, 2)}::test_triangular -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsTriangular_param_7_{left_shape=(), mode_shape=(3, 2), right_shape=(3, 2), shape=(3, 2)}::test_triangular -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsTriangular_param_8_{left_shape=(3, 2), mode_shape=(), right_shape=(), shape=(4, 3, 2)}::test_triangular -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsTriangular_param_9_{left_shape=(3, 2), mode_shape=(), right_shape=(), shape=(3, 2)}::test_triangular -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsuLaplace_param_0_{loc_shape=(), scale_shape=(), shape=(4, 3, 2)}::test_laplace -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsuLaplace_param_1_{loc_shape=(), scale_shape=(), shape=(3, 2)}::test_laplace -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsuLaplace_param_2_{loc_shape=(), scale_shape=(3, 2), shape=(4, 3, 2)}::test_laplace -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsuLaplace_param_3_{loc_shape=(), scale_shape=(3, 2), shape=(3, 2)}::test_laplace -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsuLaplace_param_4_{loc_shape=(3, 2), scale_shape=(), shape=(4, 3, 2)}::test_laplace -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsuLaplace_param_5_{loc_shape=(3, 2), scale_shape=(), shape=(3, 2)}::test_laplace -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsUniform_param_0_{high_shape=(), low_shape=(), shape=(4, 3, 2)}::test_uniform -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsUniform_param_1_{high_shape=(), low_shape=(), shape=(3, 2)}::test_uniform -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsUniform_param_4_{high_shape=(3, 2), low_shape=(), shape=(4, 3, 2)}::test_uniform -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsUniform_param_5_{high_shape=(3, 2), low_shape=(), shape=(3, 2)}::test_uniform -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWald_param_0_{mean_shape=(), scale_shape=(), shape=(4, 3, 2)}::test_wald -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWald_param_1_{mean_shape=(), scale_shape=(), shape=(3, 2)}::test_wald -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWald_param_2_{mean_shape=(), scale_shape=(3, 2), shape=(4, 3, 2)}::test_wald -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWald_param_3_{mean_shape=(), scale_shape=(3, 2), shape=(3, 2)}::test_wald -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWald_param_4_{mean_shape=(3, 2), scale_shape=(), shape=(4, 3, 2)}::test_wald -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWald_param_5_{mean_shape=(3, 2), scale_shape=(), shape=(3, 2)}::test_wald -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWeibull_param_0_{a_shape=(), shape=(4, 3, 
2)}::test_weibull -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWeibull_param_0_{a_shape=(), shape=(4, 3, 2)}::test_weibull_for_inf_a -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWeibull_param_0_{a_shape=(), shape=(4, 3, 2)}::test_weibull_for_negative_a -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWeibull_param_1_{a_shape=(), shape=(3, 2)}::test_weibull -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWeibull_param_1_{a_shape=(), shape=(3, 2)}::test_weibull_for_inf_a -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWeibull_param_1_{a_shape=(), shape=(3, 2)}::test_weibull_for_negative_a -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsZipf_param_0_{a_shape=(), shape=(4, 3, 2)}::test_zipf -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsZipf_param_1_{a_shape=(), shape=(3, 2)}::test_zipf -tests/third_party/cupy/random_tests/test_sample.py::TestChoice::test_no_none -tests/third_party/cupy/random_tests/test_sample.py::TestChoice::test_p_is_none -tests/third_party/cupy/random_tests/test_sample.py::TestChoice::test_replace_and_p_are_none -tests/third_party/cupy/random_tests/test_sample.py::TestChoice::test_replace_is_none -tests/third_party/cupy/random_tests/test_sample.py::TestChoice::test_size_and_p_are_none -tests/third_party/cupy/random_tests/test_sample.py::TestChoice::test_size_and_replace_and_p_are_none -tests/third_party/cupy/random_tests/test_sample.py::TestChoice::test_size_and_replace_are_none -tests/third_party/cupy/random_tests/test_sample.py::TestChoice::test_size_is_none -tests/third_party/cupy/random_tests/test_sample.py::TestMultinomial_param_0_{size=None}::test_multinomial -tests/third_party/cupy/random_tests/test_sample.py::TestMultinomial_param_1_{size=()}::test_multinomial -tests/third_party/cupy/random_tests/test_sample.py::TestMultinomial_param_2_{size=4}::test_multinomial -tests/third_party/cupy/random_tests/test_sample.py::TestMultinomial_param_3_{size=(0,)}::test_multinomial -tests/third_party/cupy/random_tests/test_sample.py::TestMultinomial_param_4_{size=(1, 0)}::test_multinomial -tests/third_party/cupy/random_tests/test_sample.py::TestRandint2::test_bound_float1 -tests/third_party/cupy/random_tests/test_sample.py::TestRandint2::test_goodness_of_fit -tests/third_party/cupy/random_tests/test_sample.py::TestRandint2::test_goodness_of_fit_2 -tests/third_party/cupy/random_tests/test_sample.py::TestRandomIntegers2::test_bound_1 -tests/third_party/cupy/random_tests/test_sample.py::TestRandomIntegers2::test_bound_2 -tests/third_party/cupy/random_tests/test_sample.py::TestRandomIntegers2::test_goodness_of_fit -tests/third_party/cupy/random_tests/test_sample.py::TestRandomIntegers2::test_goodness_of_fit_2 diff --git a/tests/skipped_tests_gpu_no_fp64.tbl b/tests/skipped_tests_gpu_no_fp64.tbl index 7cd2d8a1c15..2b751bbf3a8 100644 --- a/tests/skipped_tests_gpu_no_fp64.tbl +++ b/tests/skipped_tests_gpu_no_fp64.tbl @@ -1,172 +1 @@ tests/test_umath.py::test_umaths[('floor_divide', 'ff')] - -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsBeta_param_6_{a_shape=(3, 2), b_shape=(3, 2), shape=(4, 3, 2)}::test_beta -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsBeta_param_7_{a_shape=(3, 2), b_shape=(3, 2), shape=(3, 2)}::test_beta 
-tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsChisquare_param_0_{df_shape=(), shape=(4, 3, 2)}::test_chisquare -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsChisquare_param_1_{df_shape=(), shape=(3, 2)}::test_chisquare -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsChisquare_param_2_{df_shape=(3, 2), shape=(4, 3, 2)}::test_chisquare -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsChisquare_param_3_{df_shape=(3, 2), shape=(3, 2)}::test_chisquare -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsDirichlet_param_0_{alpha_shape=(3,), shape=(4, 3, 2, 3)}::test_dirichlet -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsDirichlet_param_1_{alpha_shape=(3,), shape=(3, 2, 3)}::test_dirichlet -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsExponential_param_3_{scale_shape=(3, 2), shape=(4, 3, 2)}::test_exponential -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsExponential_param_4_{scale_shape=(3, 2), shape=(3, 2)}::test_exponential -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsExponential_param_5_{scale_shape=(3, 2), shape=None}::test_exponential -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsF_param_0_{dfden_shape=(), dfnum_shape=(), shape=(4, 3, 2)}::test_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsF_param_1_{dfden_shape=(), dfnum_shape=(), shape=(3, 2)}::test_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsF_param_2_{dfden_shape=(), dfnum_shape=(3, 2), shape=(4, 3, 2)}::test_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsF_param_3_{dfden_shape=(), dfnum_shape=(3, 2), shape=(3, 2)}::test_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsF_param_4_{dfden_shape=(3, 2), dfnum_shape=(), shape=(4, 3, 2)}::test_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsF_param_5_{dfden_shape=(3, 2), dfnum_shape=(), shape=(3, 2)}::test_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsF_param_6_{dfden_shape=(3, 2), dfnum_shape=(3, 2), shape=(4, 3, 2)}::test_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsF_param_7_{dfden_shape=(3, 2), dfnum_shape=(3, 2), shape=(3, 2)}::test_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGamma_param_0_{scale_shape=(), shape=(4, 3, 2), shape_shape=()}::test_gamma -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGamma_param_1_{scale_shape=(), shape=(4, 3, 2), shape_shape=(3, 2)}::test_gamma -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGamma_param_2_{scale_shape=(), shape=(3, 2), shape_shape=()}::test_gamma -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGamma_param_3_{scale_shape=(), shape=(3, 2), shape_shape=(3, 2)}::test_gamma -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGamma_param_4_{scale_shape=(3, 2), shape=(4, 3, 2), shape_shape=()}::test_gamma -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGamma_param_5_{scale_shape=(3, 2), shape=(4, 3, 2), shape_shape=(3, 2)}::test_gamma -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGamma_param_6_{scale_shape=(3, 2), shape=(3, 
2), shape_shape=()}::test_gamma -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGamma_param_7_{scale_shape=(3, 2), shape=(3, 2), shape_shape=(3, 2)}::test_gamma -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGumbel_param_6_{loc_shape=(3, 2), scale_shape=(3, 2), shape=(4, 3, 2)}::test_gumbel -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGumbel_param_7_{loc_shape=(3, 2), scale_shape=(3, 2), shape=(3, 2)}::test_gumbel -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsuLaplace_param_6_{loc_shape=(3, 2), scale_shape=(3, 2), shape=(4, 3, 2)}::test_laplace -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsuLaplace_param_7_{loc_shape=(3, 2), scale_shape=(3, 2), shape=(3, 2)}::test_laplace -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsLogistic_param_6_{loc_shape=(3, 2), scale_shape=(3, 2), shape=(4, 3, 2)}::test_logistic -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsLogistic_param_7_{loc_shape=(3, 2), scale_shape=(3, 2), shape=(3, 2)}::test_logistic -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralChisquare_param_6_{df_shape=(3, 2), nonc_shape=(3, 2), shape=(4, 3, 2)}::test_noncentral_chisquare -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralChisquare_param_7_{df_shape=(3, 2), nonc_shape=(3, 2), shape=(3, 2)}::test_noncentral_chisquare -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralF_param_14_{dfden_shape=(3, 2), dfnum_shape=(3, 2), nonc_shape=(3, 2), shape=(4, 3, 2)}::test_noncentral_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralF_param_15_{dfden_shape=(3, 2), dfnum_shape=(3, 2), nonc_shape=(3, 2), shape=(3, 2)}::test_noncentral_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNormal_param_0_{loc_shape=(), scale_shape=(), shape=(4, 3, 2)}::test_normal -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNormal_param_1_{loc_shape=(), scale_shape=(), shape=(3, 2)}::test_normal -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNormal_param_2_{loc_shape=(), scale_shape=(3, 2), shape=(4, 3, 2)}::test_normal -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNormal_param_3_{loc_shape=(), scale_shape=(3, 2), shape=(3, 2)}::test_normal -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNormal_param_4_{loc_shape=(3, 2), scale_shape=(), shape=(4, 3, 2)}::test_normal -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNormal_param_5_{loc_shape=(3, 2), scale_shape=(), shape=(3, 2)}::test_normal -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNormal_param_6_{loc_shape=(3, 2), scale_shape=(3, 2), shape=(4, 3, 2)}::test_normal -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNormal_param_7_{loc_shape=(3, 2), scale_shape=(3, 2), shape=(3, 2)}::test_normal -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsPareto_param_0_{a_shape=(), shape=(4, 3, 2)}::test_pareto -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsPareto_param_1_{a_shape=(), shape=(3, 2)}::test_pareto -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsPareto_param_2_{a_shape=(3, 2), shape=(4, 3, 
2)}::test_pareto -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsPareto_param_3_{a_shape=(3, 2), shape=(3, 2)}::test_pareto -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsRayleigh_param_2_{scale_shape=(3, 2), shape=(4, 3, 2)}::test_rayleigh -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsRayleigh_param_2_{scale_shape=(3, 2), shape=(4, 3, 2)}::test_rayleigh_for_zero_scale -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsRayleigh_param_3_{scale_shape=(3, 2), shape=(3, 2)}::test_rayleigh -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsRayleigh_param_3_{scale_shape=(3, 2), shape=(3, 2)}::test_rayleigh_for_zero_scale -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsStandardCauchy_param_0_{shape=(4, 3, 2)}::test_standard_cauchy -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsStandardCauchy_param_1_{shape=(3, 2)}::test_standard_cauchy -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsStandardExponential_param_0_{shape=(4, 3, 2)}::test_standard_exponential -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsStandardExponential_param_1_{shape=(3, 2)}::test_standard_exponential -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsStandardGamma_param_1_{shape=(4, 3, 2), shape_shape=(3, 2)}::test_standard_gamma -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsStandardGamma_param_3_{shape=(3, 2), shape_shape=(3, 2)}::test_standard_gamma -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsStandardNormal_param_0_{shape=(4, 3, 2)}::test_standard_normal -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsStandardNormal_param_1_{shape=(3, 2)}::test_standard_normal -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsStandardT_param_0_{df_shape=(), shape=(4, 3, 2)}::test_standard_t -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsStandardT_param_1_{df_shape=(), shape=(3, 2)}::test_standard_t -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsStandardT_param_2_{df_shape=(3, 2), shape=(4, 3, 2)}::test_standard_t -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsStandardT_param_3_{df_shape=(3, 2), shape=(3, 2)}::test_standard_t -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsTriangular_param_14_{left_shape=(3, 2), mode_shape=(3, 2), right_shape=(3, 2), shape=(4, 3, 2)}::test_triangular -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsTriangular_param_15_{left_shape=(3, 2), mode_shape=(3, 2), right_shape=(3, 2), shape=(3, 2)}::test_triangular -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsUniform_param_2_{high_shape=(), low_shape=(3, 2), shape=(4, 3, 2)}::test_uniform -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsUniform_param_3_{high_shape=(), low_shape=(3, 2), shape=(3, 2)}::test_uniform -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsUniform_param_6_{high_shape=(3, 2), low_shape=(3, 2), shape=(4, 3, 2)}::test_uniform -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsUniform_param_7_{high_shape=(3, 2), low_shape=(3, 2), shape=(3, 2)}::test_uniform 
-tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsVonmises_param_0_{kappa_shape=(), mu_shape=(), shape=(4, 3, 2)}::test_vonmises -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsVonmises_param_1_{kappa_shape=(), mu_shape=(), shape=(3, 2)}::test_vonmises -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsVonmises_param_2_{kappa_shape=(), mu_shape=(3, 2), shape=(4, 3, 2)}::test_vonmises -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsVonmises_param_3_{kappa_shape=(), mu_shape=(3, 2), shape=(3, 2)}::test_vonmises -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsVonmises_param_4_{kappa_shape=(3, 2), mu_shape=(), shape=(4, 3, 2)}::test_vonmises -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsVonmises_param_5_{kappa_shape=(3, 2), mu_shape=(), shape=(3, 2)}::test_vonmises -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsVonmises_param_6_{kappa_shape=(3, 2), mu_shape=(3, 2), shape=(4, 3, 2)}::test_vonmises -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsVonmises_param_7_{kappa_shape=(3, 2), mu_shape=(3, 2), shape=(3, 2)}::test_vonmises -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWald_param_6_{mean_shape=(3, 2), scale_shape=(3, 2), shape=(4, 3, 2)}::test_wald -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWald_param_7_{mean_shape=(3, 2), scale_shape=(3, 2), shape=(3, 2)}::test_wald -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWeibull_param_2_{a_shape=(3, 2), shape=(4, 3, 2)}::test_weibull -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWeibull_param_2_{a_shape=(3, 2), shape=(4, 3, 2)}::test_weibull_for_inf_a -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWeibull_param_3_{a_shape=(3, 2), shape=(3, 2)}::test_weibull -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWeibull_param_3_{a_shape=(3, 2), shape=(3, 2)}::test_weibull_for_inf_a -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsBeta_param_6_{a_shape=(3, 2), b_shape=(3, 2), shape=(4, 3, 2)}::test_beta -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsBeta_param_7_{a_shape=(3, 2), b_shape=(3, 2), shape=(3, 2)}::test_beta -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsChisquare_param_0_{df_shape=(), shape=(4, 3, 2)}::test_chisquare -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsChisquare_param_1_{df_shape=(), shape=(3, 2)}::test_chisquare -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsChisquare_param_2_{df_shape=(3, 2), shape=(4, 3, 2)}::test_chisquare -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsChisquare_param_3_{df_shape=(3, 2), shape=(3, 2)}::test_chisquare -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsDirichlet_param_0_{alpha_shape=(3,), shape=(4, 3, 2, 3)}::test_dirichlet -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsDirichlet_param_1_{alpha_shape=(3,), shape=(3, 2, 3)}::test_dirichlet -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsExponential_param_3_{scale_shape=(3, 2), shape=(4, 3, 2)}::test_exponential 
-tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsExponential_param_4_{scale_shape=(3, 2), shape=(3, 2)}::test_exponential -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsExponential_param_5_{scale_shape=(3, 2), shape=None}::test_exponential -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsF_param_0_{dfden_shape=(), dfnum_shape=(), shape=(4, 3, 2)}::test_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsF_param_1_{dfden_shape=(), dfnum_shape=(), shape=(3, 2)}::test_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsF_param_2_{dfden_shape=(), dfnum_shape=(3, 2), shape=(4, 3, 2)}::test_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsF_param_3_{dfden_shape=(), dfnum_shape=(3, 2), shape=(3, 2)}::test_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsF_param_4_{dfden_shape=(3, 2), dfnum_shape=(), shape=(4, 3, 2)}::test_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsF_param_5_{dfden_shape=(3, 2), dfnum_shape=(), shape=(3, 2)}::test_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsF_param_6_{dfden_shape=(3, 2), dfnum_shape=(3, 2), shape=(4, 3, 2)}::test_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsF_param_7_{dfden_shape=(3, 2), dfnum_shape=(3, 2), shape=(3, 2)}::test_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGamma_param_0_{scale_shape=(), shape=(4, 3, 2), shape_shape=()}::test_gamma -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGamma_param_1_{scale_shape=(), shape=(4, 3, 2), shape_shape=(3, 2)}::test_gamma -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGamma_param_2_{scale_shape=(), shape=(3, 2), shape_shape=()}::test_gamma -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGamma_param_3_{scale_shape=(), shape=(3, 2), shape_shape=(3, 2)}::test_gamma -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGamma_param_4_{scale_shape=(3, 2), shape=(4, 3, 2), shape_shape=()}::test_gamma -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGamma_param_5_{scale_shape=(3, 2), shape=(4, 3, 2), shape_shape=(3, 2)}::test_gamma -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGamma_param_6_{scale_shape=(3, 2), shape=(3, 2), shape_shape=()}::test_gamma -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGamma_param_7_{scale_shape=(3, 2), shape=(3, 2), shape_shape=(3, 2)}::test_gamma -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGumbel_param_6_{loc_shape=(3, 2), scale_shape=(3, 2), shape=(4, 3, 2)}::test_gumbel -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsGumbel_param_7_{loc_shape=(3, 2), scale_shape=(3, 2), shape=(3, 2)}::test_gumbel -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsuLaplace_param_6_{loc_shape=(3, 2), scale_shape=(3, 2), shape=(4, 3, 2)}::test_laplace -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsuLaplace_param_7_{loc_shape=(3, 2), scale_shape=(3, 2), shape=(3, 2)}::test_laplace -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsLogistic_param_6_{loc_shape=(3, 2), scale_shape=(3, 2), shape=(4, 3, 2)}::test_logistic 
-tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsLogistic_param_7_{loc_shape=(3, 2), scale_shape=(3, 2), shape=(3, 2)}::test_logistic -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralChisquare_param_6_{df_shape=(3, 2), nonc_shape=(3, 2), shape=(4, 3, 2)}::test_noncentral_chisquare -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralChisquare_param_7_{df_shape=(3, 2), nonc_shape=(3, 2), shape=(3, 2)}::test_noncentral_chisquare -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralF_param_14_{dfden_shape=(3, 2), dfnum_shape=(3, 2), nonc_shape=(3, 2), shape=(4, 3, 2)}::test_noncentral_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNoncentralF_param_15_{dfden_shape=(3, 2), dfnum_shape=(3, 2), nonc_shape=(3, 2), shape=(3, 2)}::test_noncentral_f -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNormal_param_0_{loc_shape=(), scale_shape=(), shape=(4, 3, 2)}::test_normal -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNormal_param_1_{loc_shape=(), scale_shape=(), shape=(3, 2)}::test_normal -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNormal_param_2_{loc_shape=(), scale_shape=(3, 2), shape=(4, 3, 2)}::test_normal -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNormal_param_3_{loc_shape=(), scale_shape=(3, 2), shape=(3, 2)}::test_normal -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNormal_param_4_{loc_shape=(3, 2), scale_shape=(), shape=(4, 3, 2)}::test_normal -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNormal_param_5_{loc_shape=(3, 2), scale_shape=(), shape=(3, 2)}::test_normal -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNormal_param_6_{loc_shape=(3, 2), scale_shape=(3, 2), shape=(4, 3, 2)}::test_normal -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsNormal_param_7_{loc_shape=(3, 2), scale_shape=(3, 2), shape=(3, 2)}::test_normal -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsPareto_param_0_{a_shape=(), shape=(4, 3, 2)}::test_pareto -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsPareto_param_1_{a_shape=(), shape=(3, 2)}::test_pareto -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsPareto_param_2_{a_shape=(3, 2), shape=(4, 3, 2)}::test_pareto -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsPareto_param_3_{a_shape=(3, 2), shape=(3, 2)}::test_pareto -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsRayleigh_param_2_{scale_shape=(3, 2), shape=(4, 3, 2)}::test_rayleigh -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsRayleigh_param_2_{scale_shape=(3, 2), shape=(4, 3, 2)}::test_rayleigh_for_zero_scale -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsRayleigh_param_3_{scale_shape=(3, 2), shape=(3, 2)}::test_rayleigh -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsRayleigh_param_3_{scale_shape=(3, 2), shape=(3, 2)}::test_rayleigh_for_zero_scale -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsStandardCauchy_param_0_{shape=(4, 3, 2)}::test_standard_cauchy 
-tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsStandardCauchy_param_1_{shape=(3, 2)}::test_standard_cauchy -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsStandardExponential_param_0_{shape=(4, 3, 2)}::test_standard_exponential -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsStandardExponential_param_1_{shape=(3, 2)}::test_standard_exponential -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsStandardGamma_param_1_{shape=(4, 3, 2), shape_shape=(3, 2)}::test_standard_gamma -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsStandardGamma_param_3_{shape=(3, 2), shape_shape=(3, 2)}::test_standard_gamma -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsStandardNormal_param_0_{shape=(4, 3, 2)}::test_standard_normal -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsStandardNormal_param_1_{shape=(3, 2)}::test_standard_normal -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsStandardT_param_0_{df_shape=(), shape=(4, 3, 2)}::test_standard_t -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsStandardT_param_1_{df_shape=(), shape=(3, 2)}::test_standard_t -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsStandardT_param_2_{df_shape=(3, 2), shape=(4, 3, 2)}::test_standard_t -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsStandardT_param_3_{df_shape=(3, 2), shape=(3, 2)}::test_standard_t -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsTriangular_param_14_{left_shape=(3, 2), mode_shape=(3, 2), right_shape=(3, 2), shape=(4, 3, 2)}::test_triangular -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsTriangular_param_15_{left_shape=(3, 2), mode_shape=(3, 2), right_shape=(3, 2), shape=(3, 2)}::test_triangular -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsUniform_param_2_{high_shape=(), low_shape=(3, 2), shape=(4, 3, 2)}::test_uniform -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsUniform_param_3_{high_shape=(), low_shape=(3, 2), shape=(3, 2)}::test_uniform -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsUniform_param_6_{high_shape=(3, 2), low_shape=(3, 2), shape=(4, 3, 2)}::test_uniform -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsUniform_param_7_{high_shape=(3, 2), low_shape=(3, 2), shape=(3, 2)}::test_uniform -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsVonmises_param_0_{kappa_shape=(), mu_shape=(), shape=(4, 3, 2)}::test_vonmises -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsVonmises_param_1_{kappa_shape=(), mu_shape=(), shape=(3, 2)}::test_vonmises -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsVonmises_param_2_{kappa_shape=(), mu_shape=(3, 2), shape=(4, 3, 2)}::test_vonmises -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsVonmises_param_3_{kappa_shape=(), mu_shape=(3, 2), shape=(3, 2)}::test_vonmises -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsVonmises_param_4_{kappa_shape=(3, 2), mu_shape=(), shape=(4, 3, 2)}::test_vonmises -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsVonmises_param_5_{kappa_shape=(3, 2), mu_shape=(), shape=(3, 2)}::test_vonmises 
-tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsVonmises_param_6_{kappa_shape=(3, 2), mu_shape=(3, 2), shape=(4, 3, 2)}::test_vonmises -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsVonmises_param_7_{kappa_shape=(3, 2), mu_shape=(3, 2), shape=(3, 2)}::test_vonmises -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWald_param_6_{mean_shape=(3, 2), scale_shape=(3, 2), shape=(4, 3, 2)}::test_wald -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWald_param_7_{mean_shape=(3, 2), scale_shape=(3, 2), shape=(3, 2)}::test_wald -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWeibull_param_2_{a_shape=(3, 2), shape=(4, 3, 2)}::test_weibull -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWeibull_param_2_{a_shape=(3, 2), shape=(4, 3, 2)}::test_weibull_for_inf_a -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWeibull_param_3_{a_shape=(3, 2), shape=(3, 2)}::test_weibull -tests/third_party/cupy/random_tests/test_distributions.py::TestDistributionsWeibull_param_3_{a_shape=(3, 2), shape=(3, 2)}::test_weibull_for_inf_a From 12ca5d7e98830db4ec804d7271c502eefec26a75 Mon Sep 17 00:00:00 2001 From: Anton Volkov Date: Fri, 15 Nov 2024 13:04:08 +0100 Subject: [PATCH 20/29] Update random_tests/test_sample.py --- .../cupy/random_tests/test_sample.py | 134 ++++++++++-------- 1 file changed, 73 insertions(+), 61 deletions(-) diff --git a/tests/third_party/cupy/random_tests/test_sample.py b/tests/third_party/cupy/random_tests/test_sample.py index 2ef8f2605e5..ce6d5d175f7 100644 --- a/tests/third_party/cupy/random_tests/test_sample.py +++ b/tests/third_party/cupy/random_tests/test_sample.py @@ -11,13 +11,14 @@ class TestRandint(unittest.TestCase): + def test_lo_hi_reversed(self): with self.assertRaises(ValueError): random.randint(100, 1) def test_lo_hi_equal(self): with self.assertRaises(ValueError): - random.randint(3, 3, size=3) + random.randint(3, 3, size=0) with self.assertRaises(ValueError): # int(-0.2) is not less than int(0.3) @@ -25,49 +26,46 @@ def test_lo_hi_equal(self): def test_lo_hi_nonrandom(self): a = random.randint(-0.9, 1.1, size=3) - numpy.testing.assert_array_equal(a, cupy.full((3,), 0)) + testing.assert_array_equal(a, cupy.full((3,), 0)) a = random.randint(-1.1, -0.9, size=(2, 2)) - numpy.testing.assert_array_equal(a, cupy.full((2, 2), -1)) + testing.assert_array_equal(a, cupy.full((2, 2), -1)) def test_zero_sizes(self): a = random.randint(10, size=(0,)) - numpy.testing.assert_array_equal(a, cupy.array(())) + testing.assert_array_equal(a, cupy.array(())) a = random.randint(10, size=0) - numpy.testing.assert_array_equal(a, cupy.array(())) + testing.assert_array_equal(a, cupy.array(())) @testing.fix_random() class TestRandint2(unittest.TestCase): - @pytest.mark.usefixtures("allow_fall_back_on_numpy") + @_condition.repeat(3, 10) def test_bound_1(self): - vals = [random.randint(0, 10, (2, 3)) for _ in range(10)] + vals = [random.randint(0, 10, (2, 3)) for _ in range(20)] for val in vals: - self.assertEqual(val.shape, (2, 3)) - self.assertEqual(min(_.min() for _ in vals), 0) - self.assertEqual(max(_.max() for _ in vals), 9) + assert val.shape == (2, 3) + assert min(_.min() for _ in vals) == 0 + assert max(_.max() for _ in vals) == 9 - @pytest.mark.usefixtures("allow_fall_back_on_numpy") @_condition.repeat(3, 10) def test_bound_2(self): vals = [random.randint(0, 2) for _ in range(20)] for val in vals: - 
self.assertEqual(val.shape, ()) - self.assertEqual(min(_.min() for _ in vals), 0) - self.assertEqual(max(_.max() for _ in vals), 1) + assert val.shape == () + assert min(vals) == 0 + assert max(vals) == 1 - @pytest.mark.usefixtures("allow_fall_back_on_numpy") @_condition.repeat(3, 10) def test_bound_overflow(self): # 100 - (-100) exceeds the range of int8 val = random.randint(numpy.int8(-100), numpy.int8(100), size=20) - self.assertEqual(val.shape, (20,)) - self.assertGreaterEqual(val.min(), -100) - self.assertLess(val.max(), 100) + assert val.shape == (20,) + assert val.min() >= -100 + assert val.max() < 100 - @pytest.mark.usefixtures("allow_fall_back_on_numpy") @_condition.repeat(3, 10) def test_bound_float1(self): # generate floats s.t. int(low) < int(high) @@ -76,26 +74,25 @@ def test_bound_float1(self): high += 1 vals = [random.randint(low, high, (2, 3)) for _ in range(10)] for val in vals: - self.assertEqual(val.shape, (2, 3)) - self.assertEqual(min(_.min() for _ in vals), int(low)) - self.assertEqual(max(_.max() for _ in vals), int(high) - 1) + assert val.shape == (2, 3) + assert min(_.min() for _ in vals) == int(low) + assert max(_.max() for _ in vals) == int(high) - 1 - @pytest.mark.usefixtures("allow_fall_back_on_numpy") def test_bound_float2(self): vals = [random.randint(-1.0, 1.0, (2, 3)) for _ in range(10)] for val in vals: - self.assertEqual(val.shape, (2, 3)) - self.assertEqual(min(_.min() for _ in vals), -1) - self.assertEqual(max(_.max() for _ in vals), 0) + assert val.shape == (2, 3) + assert min(_.min() for _ in vals) == -1 + assert max(_.max() for _ in vals) == 0 @_condition.repeat(3, 10) def test_goodness_of_fit(self): mx = 5 trial = 100 - vals = [numpy.random.randint(mx) for _ in range(trial)] + vals = [random.randint(mx) for _ in range(trial)] counts = numpy.histogram(vals, bins=numpy.arange(mx + 1))[0] expected = numpy.array([float(trial) / mx] * mx) - self.assertTrue(_hypothesis.chi_square_test(counts, expected)) + assert _hypothesis.chi_square_test(counts, expected) @_condition.repeat(3, 10) def test_goodness_of_fit_2(self): @@ -103,43 +100,52 @@ def test_goodness_of_fit_2(self): vals = random.randint(mx, size=(5, 20)) counts = numpy.histogram(vals, bins=numpy.arange(mx + 1))[0] expected = numpy.array([float(vals.size) / mx] * mx) - self.assertTrue(_hypothesis.chi_square_test(counts, expected)) + assert _hypothesis.chi_square_test(counts, expected) class TestRandintDtype(unittest.TestCase): - # numpy.int8, numpy.uint8, numpy.int16, numpy.uint16, numpy.int32]) - @testing.for_dtypes([numpy.int32]) + + @testing.with_requires("numpy>=2.0") + @testing.for_dtypes( + [numpy.int8, numpy.uint8, numpy.int16, numpy.uint16, numpy.int32] + ) def test_dtype(self, dtype): size = (1000,) low = numpy.iinfo(dtype).min - high = numpy.iinfo(dtype).max - x = random.randint(low, high, size, dtype) - self.assertLessEqual(low, min(x)) - self.assertLessEqual(max(x), high) + high = numpy.iinfo(dtype).max + 1 + x = random.randint(low, high, size, dtype).get() + assert low <= min(x) + assert max(x) <= high - # @testing.for_int_dtypes(no_bool=True) + @pytest.mark.skip("high=(max+1) is not supported") + @testing.for_int_dtypes(no_bool=True) @testing.for_dtypes([numpy.int32]) def test_dtype2(self, dtype): dtype = numpy.dtype(dtype) + # randint does not support 64 bit integers + if dtype in (numpy.int64, numpy.uint64): + return + iinfo = numpy.iinfo(dtype) size = (10000,) - x = random.randint(iinfo.min, iinfo.max, size, dtype) - self.assertEqual(x.dtype, dtype) - self.assertLessEqual(iinfo.min, 
min(x)) - self.assertLessEqual(max(x), iinfo.max) + x = random.randint(iinfo.min, iinfo.max + 1, size, dtype).get() + assert x.dtype == dtype + assert iinfo.min <= min(x) + assert max(x) <= iinfo.max # Lower bound check - with self.assertRaises(OverflowError): + with self.assertRaises(ValueError): random.randint(iinfo.min - 1, iinfo.min + 10, size, dtype) # Upper bound check - with self.assertRaises(OverflowError): + with self.assertRaises(ValueError): random.randint(iinfo.max - 10, iinfo.max + 2, size, dtype) class TestRandomIntegers(unittest.TestCase): + def test_normal(self): with mock.patch("dpnp.random.RandomState.randint") as m: random.random_integers(3, 5) @@ -164,50 +170,53 @@ def test_size_is_not_none(self): @testing.fix_random() class TestRandomIntegers2(unittest.TestCase): + @_condition.repeat(3, 10) def test_bound_1(self): - vals = [random.random_integers(0, 10, (2, 3)).get() for _ in range(10)] + vals = [random.random_integers(0, 10, (2, 3)) for _ in range(10)] for val in vals: - self.assertEqual(val.shape, (2, 3)) - self.assertEqual(min(_.min() for _ in vals), 0) - self.assertEqual(max(_.max() for _ in vals), 10) + assert val.shape == (2, 3) + assert min(_.min() for _ in vals) == 0 + assert max(_.max() for _ in vals) == 10 @_condition.repeat(3, 10) def test_bound_2(self): - vals = [random.random_integers(0, 2).get() for _ in range(20)] + vals = [random.random_integers(0, 2) for _ in range(20)] for val in vals: - self.assertEqual(val.shape, ()) - self.assertEqual(min(vals), 0) - self.assertEqual(max(vals), 2) + assert val.shape == () + assert min(vals) == 0 + assert max(vals) == 2 @_condition.repeat(3, 10) def test_goodness_of_fit(self): mx = 5 trial = 100 - vals = [random.randint(0, mx).get() for _ in range(trial)] + vals = [random.randint(0, mx) for _ in range(trial)] counts = numpy.histogram(vals, bins=numpy.arange(mx + 1))[0] expected = numpy.array([float(trial) / mx] * mx) - self.assertTrue(_hypothesis.chi_square_test(counts, expected)) + assert _hypothesis.chi_square_test(counts, expected) @_condition.repeat(3, 10) def test_goodness_of_fit_2(self): mx = 5 - vals = random.randint(0, mx, (5, 20)).get() + vals = random.randint(0, mx, (5, 20)) counts = numpy.histogram(vals, bins=numpy.arange(mx + 1))[0] expected = numpy.array([float(vals.size) / mx] * mx) - self.assertTrue(_hypothesis.chi_square_test(counts, expected)) + assert _hypothesis.chi_square_test(counts, expected) +@pytest.mark.skip("random.choice() is not supported yet") class TestChoice(unittest.TestCase): + def setUp(self): - self.rs_tmp = random.generator._random_states + self.rs_tmp = random._generator._random_states device_id = cuda.Device().id self.m = mock.Mock() self.m.choice.return_value = 0 - random.generator._random_states = {device_id: self.m} + random._generator._random_states = {device_id: self.m} def tearDown(self): - random.generator._random_states = self.rs_tmp + random._generator._random_states = self.rs_tmp def test_size_and_replace_and_p_are_none(self): random.choice(3) @@ -243,10 +252,11 @@ def test_no_none(self): class TestRandomSample(unittest.TestCase): + def test_rand(self): - # no keyword argument 'dtype' in dpnp - with self.assertRaises(TypeError): - random.rand(1, 2, 3, dtype=numpy.float32) + with mock.patch("dpnp.random.RandomState.random_sample") as m: + random.rand(1, 2, 3) + m.assert_called_once_with(size=(1, 2, 3), usm_type="device") def test_rand_default_dtype(self): with mock.patch("dpnp.random.RandomState.random_sample") as m: @@ -280,12 +290,14 @@ def 
test_randn_invalid_argument(self): {"size": (1, 0)}, ) @testing.fix_random() +@pytest.mark.skip("random.multinomial() is not fully supported") class TestMultinomial(unittest.TestCase): + @_condition.repeat(3, 10) @testing.for_float_dtypes() @testing.numpy_cupy_allclose(rtol=0.05) def test_multinomial(self, xp, dtype): pvals = xp.array([0.2, 0.3, 0.5], dtype) x = xp.random.multinomial(100000, pvals, self.size) - self.assertEqual(x.dtype, "l") + assert x.dtype.kind == "l" return x / 100000 From e405d242442af7130cc4587fda2a63cf9a3b38a3 Mon Sep 17 00:00:00 2001 From: Anton Volkov Date: Fri, 15 Nov 2024 13:13:00 +0100 Subject: [PATCH 21/29] Update random_tests/test_random.py --- .../cupy/random_tests/test_random.py | 22 +++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 tests/third_party/cupy/random_tests/test_random.py diff --git a/tests/third_party/cupy/random_tests/test_random.py b/tests/third_party/cupy/random_tests/test_random.py new file mode 100644 index 00000000000..d548d8f8e6a --- /dev/null +++ b/tests/third_party/cupy/random_tests/test_random.py @@ -0,0 +1,22 @@ +import unittest + +import pytest + +from dpnp import random +from tests.third_party.cupy import testing + + +@pytest.mark.skip("random.get_random_state() is not supported yet") +class TestResetSeed(unittest.TestCase): + + @testing.for_float_dtypes(no_float16=True) + def test_reset_seed(self, dtype): + rs = random.get_random_state() + rs.seed(0) + l1 = rs.rand(10, dtype=dtype) + + rs = random.get_random_state() + rs.seed(0) + l2 = rs.rand(10, dtype=dtype) + + testing.assert_array_equal(l1, l2) From 243612d79d55e9a362c33edf1ec1ec4391409646 Mon Sep 17 00:00:00 2001 From: Anton Volkov Date: Fri, 15 Nov 2024 13:32:19 +0100 Subject: [PATCH 22/29] Update random_tests/test_permutations.py --- .../cupy/random_tests/test_permutations.py | 202 ++++++++++++++++++ 1 file changed, 202 insertions(+) create mode 100644 tests/third_party/cupy/random_tests/test_permutations.py diff --git a/tests/third_party/cupy/random_tests/test_permutations.py b/tests/third_party/cupy/random_tests/test_permutations.py new file mode 100644 index 00000000000..596b1b3e044 --- /dev/null +++ b/tests/third_party/cupy/random_tests/test_permutations.py @@ -0,0 +1,202 @@ +import unittest + +import numpy +import pytest + +import dpnp as cupy +from tests.helper import has_support_aspect64 +from tests.third_party.cupy import testing +from tests.third_party.cupy.testing import _condition + + +@testing.parameterize( + {"seed": None}, + {"seed": 0}, +) +@pytest.mark.skipif(not has_support_aspect64(), reason="fp64 is required") +class TestPermutations(unittest.TestCase): + + def _xp_random(self, xp): + if self.seed is None: + return xp.random + else: + pytest.skip("random.RandomState.permutation() is not supported yet") + return xp.random.RandomState(seed=self.seed) + + # Test ranks + + # TODO(niboshi): Fix xfail + @pytest.mark.xfail(reason="Explicit error types required") + def test_permutation_zero_dim(self): + for xp in (numpy, cupy): + xp_random = self._xp_random(xp) + a = testing.shaped_random((), xp) + with pytest.raises(IndexError): + xp_random.permutation(a) + + # Test same values + + @testing.for_all_dtypes(no_float16=True, no_bool=True, no_complex=True) + def test_permutation_sort_1dim(self, dtype): + cupy_random = self._xp_random(cupy) + a = cupy.arange(10, dtype=dtype) + b = cupy.copy(a) + c = cupy_random.permutation(a) + testing.assert_allclose(a, b) + testing.assert_allclose(b, cupy.sort(c)) + + @testing.for_all_dtypes(no_float16=True, 
no_bool=True, no_complex=True) + def test_permutation_sort_ndim(self, dtype): + cupy_random = self._xp_random(cupy) + a = cupy.arange(15, dtype=dtype).reshape(5, 3) + b = cupy.copy(a) + c = cupy_random.permutation(a) + testing.assert_allclose(a, b) + testing.assert_allclose(b, cupy.sort(c, axis=0)) + + # Test seed + + @testing.for_all_dtypes() + def test_permutation_seed1(self, dtype): + a = testing.shaped_random((10,), cupy, dtype) + b = cupy.copy(a) + + cupy_random = self._xp_random(cupy) + if self.seed is None: + cupy_random.seed(0) + pa = cupy_random.permutation(a) + cupy_random = self._xp_random(cupy) + if self.seed is None: + cupy_random.seed(0) + pb = cupy_random.permutation(b) + + testing.assert_allclose(pa, pb) + + +@pytest.mark.skipif(not has_support_aspect64(), reason="fp64 is required") +class TestShuffle(unittest.TestCase): + + # Test ranks + + @pytest.mark.skip("no proper validation yet") + def test_shuffle_zero_dim(self): + for xp in (numpy, cupy): + a = testing.shaped_random((), xp) + with pytest.raises(TypeError): + xp.random.shuffle(a) + + # Test same values + + @testing.for_all_dtypes(no_float16=True, no_bool=True, no_complex=True) + def test_shuffle_sort_1dim(self, dtype): + a = cupy.arange(10, dtype=dtype) + b = cupy.copy(a) + cupy.random.shuffle(a) + testing.assert_allclose(cupy.sort(a), b) + + @testing.for_all_dtypes(no_float16=True, no_bool=True, no_complex=True) + def test_shuffle_sort_ndim(self, dtype): + a = cupy.arange(15, dtype=dtype).reshape(5, 3) + b = cupy.copy(a) + cupy.random.shuffle(a) + testing.assert_allclose(cupy.sort(a, axis=0), b) + + # Test seed + + @testing.for_all_dtypes() + def test_shuffle_seed1(self, dtype): + a = testing.shaped_random((10,), cupy, dtype) + b = cupy.copy(a) + cupy.random.seed(0) + cupy.random.shuffle(a) + cupy.random.seed(0) + cupy.random.shuffle(b) + testing.assert_allclose(a, b) + + +@testing.parameterize( + *( + testing.product( + { + # 'num': [0, 1, 100, 1000, 10000, 100000], + "num": [0, 1, 100], # dpnp.random.permutation() is slow + } + ) + ) +) +@pytest.mark.skipif(not has_support_aspect64(), reason="fp64 is required") +class TestPermutationSoundness(unittest.TestCase): + + def setUp(self): + a = cupy.random.permutation(self.num) + self.a = a + + # Test soundness + + @_condition.repeat(3) + def test_permutation_soundness(self): + assert (numpy.sort(self.a) == numpy.arange(self.num)).all() + + +@testing.parameterize( + *( + testing.product( + { + "offset": [0, 17, 34, 51], + "gap": [1, 2, 3, 5, 7], + "mask": [1, 2, 4, 8, 16, 32, 64, 128], + } + ) + ) +) +class TestPermutationRandomness(unittest.TestCase): + + num = 256 + + def setUp(self): + a = cupy.random.permutation(self.num) + self.a = a + self.num_half = int(self.num / 2) + + # Simple bit proportion test + + # This test is to check kind of randomness of permutation. + # An intuition behind this test is that, when you make a sub-array + # by regularly extracting half elements from the permuted array, + # the sub-array should also hold randomness and accordingly + # frequency of appearance of 0 and 1 at each bit position of + # whole elements in the sub-array should become similar + # when elements count of original array is 2^N. + # Note that this is not an established method to check randomness. + # TODO(anaruse): implement randomness check using some established methods. 
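A minimal, self-contained sketch of the bit-proportion idea described in the comment block above, using plain NumPy only; the helper name `bit_proportion` and its default parameter values are illustrative assumptions for this aside and are not part of the patch itself:

import numpy

def bit_proportion(num=256, offset=0, gap=3, mask=8):
    # Permute 0..num-1, then take a regularly spaced half-sample of it.
    a = numpy.random.permutation(num)
    half = num // 2
    # gap is chosen coprime to num so the half-sample indices do not repeat.
    index = (numpy.arange(half) * gap + offset) % num
    samples = a[index]
    # Count how many sampled values have the masked bit set vs. unset.
    ones = int(numpy.count_nonzero((samples & mask) > 0))
    return ones, half - ones

ones, zeros = bit_proportion()
print(ones, zeros)  # for num=256, both counts should hover around 64

Because num is a power of two, every bit position is set in exactly half of the values 0..num-1, so a well-mixed half-sample should split the masked bit roughly evenly; the test that follows applies the same extraction and then bounds the tail probability of the observed split.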
+ @_condition.repeat_with_success_at_least(5, 3) + @pytest.mark.skip("no support of index as numpy array") + def test_permutation_randomness(self): + if self.mask > self.num_half: + return + index = numpy.arange(self.num_half) + index = (index * self.gap + self.offset) % self.num + samples = self.a[index] + ret = samples & self.mask > 0 + count = numpy.count_nonzero(ret) # expectation: self.num_half / 2 + if count > self.num_half - count: + count = self.num_half - count + prob_le_count = self._calc_probability(count) + if prob_le_count < 0.001: + raise + + def _calc_probability(self, count): + comb_all = self._comb(self.num, self.num_half) + comb_le_count = 0 + for i in range(count + 1): + tmp = self._comb(self.num_half, i) + comb_i = tmp * tmp + comb_le_count += comb_i + prob = comb_le_count / comb_all + return prob + + def _comb(self, N, k): + val = numpy.float64(1) + for i in range(k): + val *= (N - i) / (k - i) + return val From 142b4fa705587a93a7f24c0f84100fa672d87141 Mon Sep 17 00:00:00 2001 From: Anton Volkov Date: Fri, 15 Nov 2024 13:43:56 +0100 Subject: [PATCH 23/29] Add random_tests/test_init.py --- tests/third_party/cupy/random_tests/test_init.py | 9 +++++++++ 1 file changed, 9 insertions(+) create mode 100644 tests/third_party/cupy/random_tests/test_init.py diff --git a/tests/third_party/cupy/random_tests/test_init.py b/tests/third_party/cupy/random_tests/test_init.py new file mode 100644 index 00000000000..1f45bfd4d86 --- /dev/null +++ b/tests/third_party/cupy/random_tests/test_init.py @@ -0,0 +1,9 @@ +import pytest + +import dpnp as cupy + + +@pytest.mark.usefixtures("allow_fall_back_on_numpy") +def test_bytes(): + out = cupy.random.bytes(10) + assert isinstance(out, bytes) From 17ba4cc780f8ea8e699cba1617ae6af435fc6aad Mon Sep 17 00:00:00 2001 From: Anton Volkov Date: Fri, 15 Nov 2024 14:49:56 +0100 Subject: [PATCH 24/29] Add common_distributions.py, test_generator_api.py, common_distributions.py --- .../cupy/random_tests/common_distributions.py | 517 +++++++ .../cupy/random_tests/test_generator.py | 1298 +++++++++++++++++ .../cupy/random_tests/test_generator_api.py | 338 +++++ 3 files changed, 2153 insertions(+) create mode 100644 tests/third_party/cupy/random_tests/common_distributions.py create mode 100644 tests/third_party/cupy/random_tests/test_generator.py create mode 100644 tests/third_party/cupy/random_tests/test_generator_api.py diff --git a/tests/third_party/cupy/random_tests/common_distributions.py b/tests/third_party/cupy/random_tests/common_distributions.py new file mode 100644 index 00000000000..5cb1cc44e20 --- /dev/null +++ b/tests/third_party/cupy/random_tests/common_distributions.py @@ -0,0 +1,517 @@ +import functools +import unittest + +import numpy + +import dpnp as cupy +from tests.third_party.cupy import testing +from tests.third_party.cupy.testing import _condition + + +def two_sample_Kolmogorov_Smirnov_test(observed1, observed2): + """Computes the Kolmogorov-Smirnov statistic on 2 samples + + Unlike `scipy.stats.ks_2samp`, the returned p-value is not accurate + for large p. 
+ """ + assert observed1.dtype == observed2.dtype + (n1,) = observed1.shape + (n2,) = observed2.shape + assert n1 >= 100 and n2 >= 100 + observed = numpy.concatenate([observed1, observed2]) + indices = numpy.argsort(observed) + observed = observed[indices] # sort + ds = numpy.cumsum(numpy.where(indices < n1, -n2, n1).astype(numpy.int64)) + assert ds[-1] == 0 + check = numpy.concatenate([observed[:-1] < observed[1:], [True]]) + ds = ds[check] + d_plus = float(ds.max()) / (n1 * n2) + d_minus = -float(ds.min()) / (n1 * n2) + d = max(d_plus, d_minus) + # Approximate p = special.kolmogorov(d * numpy.sqrt(n1 * n2 / (n1 + n2))) + p = min(1.0, 2.0 * numpy.exp(-2.0 * d**2 * n1 * n2 / (n1 + n2))) + return d_plus, d_minus, p + + +class BaseGeneratorTestCase(unittest.TestCase): + + target_method = None + + def get_rng(self, xp, seed): + pass + + def set_rng_seed(self, seed): + pass + + def setUp(self): + self.__seed = testing.generate_seed() + # rng will be a new or old generator API object + self.rng = self.get_rng(cupy, self.__seed) + + def _get_generator_func(self, *args, **kwargs): + assert isinstance( + self.target_method, str + ), "generate_method must be overridden" + f = getattr(self.rng, self.target_method) + return lambda: f(*args, **kwargs) + + def _generate_check_repro(self, func, seed): + # Sample a random array while checking reproducibility + self.set_rng_seed(seed) + x = func() + self.set_rng_seed(seed) + y = func() + testing.assert_array_equal( + x, y, "Randomly generated arrays with the same seed did not match" + ) + return x + + def generate(self, *args, **kwargs): + # Pick one sample from generator. + # Reproducibility is checked by repeating seed-and-sample cycle twice. + func = self._get_generator_func(*args, **kwargs) + return self._generate_check_repro(func, self.__seed) + + def generate_many(self, *args, **kwargs): + # Pick many samples from generator. + # Reproducibility is checked only for the first sample, + # because it's very slow to set seed every time. 
+        _count = kwargs.pop("_count", None)
+        assert _count is not None, "_count is required"
+        func = self._get_generator_func(*args, **kwargs)
+
+        if _count == 0:
+            return []
+
+        vals = [self._generate_check_repro(func, self.__seed)]
+        for _ in range(1, _count):
+            vals.append(func())
+        return vals
+
+    def check_ks(self, significance_level, cupy_len=100, numpy_len=1000):
+        return functools.partial(
+            self._check_ks, significance_level, cupy_len, numpy_len
+        )
+
+    def _check_ks(
+        self, significance_level, cupy_len, numpy_len, *args, **kwargs
+    ):
+        assert "size" in kwargs
+
+        # cupy
+        func = self._get_generator_func(*args, **kwargs)
+        vals_cupy = func()
+        assert vals_cupy.size > 0
+        count = 1 + (cupy_len - 1) // vals_cupy.size
+        vals_cupy = [vals_cupy]
+        for _ in range(1, count):
+            vals_cupy.append(func())
+        vals_cupy = cupy.stack(vals_cupy).ravel()
+
+        # numpy
+        kwargs["size"] = numpy_len
+        dtype = kwargs.pop("dtype", None)
+        numpy_rng = self.get_rng(numpy, self.__seed)
+        vals_numpy = getattr(numpy_rng, self.target_method)(*args, **kwargs)
+        if dtype is not None:
+            vals_numpy = vals_numpy.astype(dtype, copy=False)
+
+        # test
+        d_plus, d_minus, p_value = two_sample_Kolmogorov_Smirnov_test(
+            cupy.asnumpy(vals_cupy), vals_numpy
+        )
+        if p_value < significance_level:
+            message = """Rejected null hypothesis:
+p: %f
+D+ (cupy < numpy): %f
+D- (cupy > numpy): %f""" % (
+                p_value,
+                d_plus,
+                d_minus,
+            )
+            raise AssertionError(message)
+
+
+uniform_params = [
+    {"low": 1, "high": 10.0, "size": (3, 5)},
+    {"low": [1, 2], "high": 3, "size": None},
+    {"low": 20, "high": 20.1, "size": 1000},
+]
+
+
+class Uniform:
+    target_method = "uniform"
+
+    def test_uniform(self):
+        low = self.low
+        if isinstance(low, list):
+            low = cupy.array(low)
+        high = self.high
+        if isinstance(high, list):
+            high = cupy.array(high)
+
+        result = self.generate(low, high, self.size)
+        assert cupy.all(result >= cupy.asarray(low).min())
+        assert cupy.all(result < cupy.asarray(high).max())
+
+    @_condition.repeat_with_success_at_least(10, 3)
+    def test_uniform_ks(self):
+        if isinstance(self.low, list) or isinstance(self.high, list):
+            self.skipTest("Statistical checks only for scalar args")
+        self.check_ks(0.05)(low=self.low, high=self.high, size=2000)
+
+
+beta_params = [
+    {"a": 1.0, "b": 3.0},
+    {"a": 3.0, "b": 3.0},
+    {"a": 3.0, "b": 1.0},
+    {"a": [1.0, 3.0, 5.0, 6.0, 9.0], "b": 7.0},
+    {"a": 5.0, "b": [1.0, 5.0, 8.0, 1.0, 3.0]},
+    {"a": [8.0, 6.0, 2.0, 4.0, 7.0], "b": [3.0, 1.0, 2.0, 8.0, 1.0]},
+]
+
+
+class Beta:
+
+    target_method = "beta"
+
+    def test_beta(self):
+        a = self.a
+        b = self.b
+        if isinstance(self.a, list) or isinstance(self.b, list):
+            a = cupy.array(self.a)
+            b = cupy.array(self.b)
+        self.generate(a, b, size=(3, 5))
+
+    @_condition.repeat_with_success_at_least(10, 3)
+    def test_beta_ks(self):
+        if isinstance(self.a, list) or isinstance(self.b, list):
+            self.skipTest("Statistical checks only for scalar args")
+        self.check_ks(0.05)(a=self.a, b=self.b, size=2000)
+
+
+class StandardExponential:
+
+    target_method = "standard_exponential"
+
+    def test_standard_exponential(self):
+        self.generate(size=(3, 2))
+
+    @testing.slow
+    @_condition.repeat(10)
+    def test_standard_exponential_isfinite(self):
+        x = self.generate(size=10**7)
+        assert cupy.isfinite(x).all()
+
+    @testing.for_dtypes("fd")
+    @_condition.repeat_with_success_at_least(10, 3)
+    def test_standard_exponential_ks(self, dtype):
+        self.check_ks(0.05)(size=2000, dtype=dtype)
+
+
+standard_gamma_params = [{"shape": 0.5}, {"shape": 1.0}, {"shape": 3.0}]
+
+
+class 
StandardGamma: + + target_method = "standard_gamma" + + def test_standard_gamma(self): + self.generate(shape=self.shape, size=(3, 2)) + + @testing.for_dtypes("fd") + @_condition.repeat_with_success_at_least(10, 3) + def test_standard_gamma_ks(self, dtype): + self.check_ks(0.05)(shape=self.shape, size=2000, dtype=dtype) + + +standard_normal_params = [ + {"size": None}, + {"size": (1, 2, 3)}, + {"size": 3}, + {"size": (1000, 1000)}, + {"size": (3, 3)}, + {"size": ()}, +] + + +class StandardNormal: + + target_method = "standard_normal" + + @testing.for_dtypes("fd") + @_condition.repeat_with_success_at_least(10, 3) + def test_normal_ks(self, dtype): + self.check_ks(0.05)(size=self.size, dtype=dtype) + + +exponential_params = [{"scale": 0.5}, {"scale": 1}, {"scale": 10}] + + +class Exponential: + + target_method = "exponential" + + def test_exponential(self): + self.generate(scale=self.scale, size=(3, 2)) + + @_condition.repeat_with_success_at_least(10, 3) + def test_exponential_ks(self): + self.check_ks(0.05)(self.scale, size=2000) + + +poisson_params = [{"lam": 1.0}, {"lam": 3.0}, {"lam": 10.0}] + + +class Poisson: + + target_method = "poisson" + + def test_poisson(self): + self.generate(lam=self.lam, size=(3, 2)) + + @_condition.repeat_with_success_at_least(10, 3) + def test_poisson_ks(self): + self.check_ks(0.05)(lam=self.lam, size=2000) + + def test_poisson_large(self): + self.generate(lam=self.lam, size=(1000, 1000)) + + +gamma_params = [ + {"shape": 0.5, "scale": 0.5}, + {"shape": 1.0, "scale": 0.5}, + {"shape": 3.0, "scale": 0.5}, + {"shape": 0.5, "scale": 1.0}, + {"shape": 1.0, "scale": 1.0}, + {"shape": 3.0, "scale": 1.0}, + {"shape": 0.5, "scale": 3.0}, + {"shape": 1.0, "scale": 3.0}, + {"shape": 3.0, "scale": 3.0}, +] + + +class Gamma: + + target_method = "gamma" + + def test_gamma_1(self): + self.generate(shape=self.shape, scale=self.scale, size=(3, 2)) + + def test_gamma_2(self): + self.generate(shape=self.shape, size=(3, 2)) + + @_condition.repeat_with_success_at_least(10, 3) + def test_gamma_ks(self): + self.check_ks(0.05)(self.shape, self.scale, size=2000) + + +binomial_params = [ + {"n": 2, "p": 0.5}, + {"n": 5, "p": 0.5}, + {"n": 10, "p": 0.5}, + {"n": 2, "p": 0.1}, + {"n": 5, "p": 0.1}, + {"n": 10, "p": 0.1}, + {"n": 2, "p": 1.0}, + {"n": 2, "p": 1.0}, + {"n": 2, "p": 1.0}, +] + + +class Binomial: + + target_method = "binomial" + + def test_binomial(self): + self.generate(n=self.n, p=self.p, size=(3, 2)) + + @_condition.repeat_with_success_at_least(10, 3) + def test_binomial_ks(self): + self.check_ks(0.05)(self.n, self.p, size=2000) + + +geometric_params = [ + {"p": 0.5}, + {"p": 0.1}, + {"p": 1.0}, + {"p": [0.1, 0.5]}, +] + + +class Geometric: + + target_method = "geometric" + + def test_geometric(self): + p = self.p + if not isinstance(self.p, float): + p = cupy.array(self.p) + self.generate(p=p, size=(3, 2)) + + @_condition.repeat_with_success_at_least(10, 3) + def test_geometric_ks(self): + if not isinstance(self.p, float): + self.skipTest("Statistical checks only for scalar `p`") + self.check_ks(0.05)(p=self.p, size=2000) + + +hypergeometric_params = [ + {"ngood": 5, "nbad": 5, "nsample": 5}, + {"ngood": 10, "nbad": 10, "nsample": 10}, + {"ngood": 100, "nbad": 2, "nsample": 10}, + {"ngood": [0, 5, 8], "nbad": [5, 0, 3], "nsample": [2, 1, 8]}, + {"ngood": [1, 4, 2, 7, 6], "nbad": 5.0, "nsample": [2, 7, 4, 6, 5]}, +] + + +class Hypergeometric: + + target_method = "hypergeometric" + + def test_hypergeometric(self): + ngood = self.ngood + nbad = self.nbad + nsample = 
self.nsample + if ( + isinstance(self.ngood, list) + or isinstance(self.nbad, list) + or isinstance(self.nsample, list) + ): + ngood = cupy.array(self.ngood) + nbad = cupy.array(self.nbad) + nsample = cupy.array(self.nsample) + self.generate(ngood, nbad, nsample) + + @_condition.repeat_with_success_at_least(10, 3) + def test_hypergeometric_ks(self): + if ( + isinstance(self.ngood, list) + or isinstance(self.nbad, list) + or isinstance(self.nsample, list) + ): + self.skipTest("Stastical checks only for scalar args") + self.check_ks(0.05)(self.ngood, self.nbad, self.nsample, size=2000) + + +power_params = [ + {"a": 0.5}, + {"a": 1}, + {"a": 5}, + {"a": [0.8, 0.7, 1, 2, 5]}, +] + + +class Power: + + target_method = "power" + + def test_power(self): + a = self.a + if not isinstance(self.a, float): + a = cupy.array(self.a) + self.generate(a=a) + + @_condition.repeat_with_success_at_least(10, 3) + def test_power_ks(self): + if not isinstance(self.a, float): + self.skipTest("Statistical checks only for scalar `a`") + self.check_ks(0.05)(a=self.a, size=2000) + + +logseries_params = [ + {"p": 0.5}, + {"p": 0.1}, + {"p": 0.9}, + {"p": [0.8, 0.7]}, +] + + +class Logseries: + + target_method = "logseries" + + def test_logseries(self): + p = self.p + if not isinstance(self.p, float): + p = cupy.array(self.p) + self.generate(p=p, size=(3, 2)) + + @_condition.repeat_with_success_at_least(10, 3) + def test_geometric_ks(self): + if not isinstance(self.p, float): + self.skipTest("Statistical checks only for scalar `p`") + self.check_ks(0.05)(p=self.p, size=2000) + + +chisquare_params = [ + {"df": 1.0}, + {"df": 3.0}, + {"df": 10.0}, + {"df": [2, 5, 8]}, +] + + +class Chisquare: + + target_method = "chisquare" + + def test_chisquare(self): + df = self.df + if not isinstance(self.df, float): + df = cupy.array(self.df) + self.generate(df=df) + + @_condition.repeat_with_success_at_least(10, 3) + def test_chisquare_ks(self): + if not isinstance(self.df, float): + self.skipTest("Statistical checks only for scalar `df`") + self.check_ks(0.05)(df=self.df, size=2000) + + +f_params = [ + {"dfnum": 1.0, "dfden": 3.0}, + {"dfnum": 3.0, "dfden": 3.0}, + {"dfnum": 3.0, "dfden": 1.0}, + {"dfnum": [1.0, 3.0, 3.0], "dfden": [3.0, 3.0, 1.0]}, +] + + +class F: + + target_method = "f" + + def test_f(self): + dfnum = self.dfnum + dfden = self.dfden + if isinstance(self.dfnum, list) or isinstance(self.dfden, list): + dfnum = cupy.array(self.dfnum) + dfden = cupy.array(self.dfden) + self.generate(dfnum, dfden) + + @_condition.repeat_with_success_at_least(10, 3) + def test_f_ks(self): + if isinstance(self.dfnum, list) or isinstance(self.dfden, list): + self.skipTest("Stastical checks only for scalar args") + self.check_ks(0.05)(self.dfnum, self.dfden, size=2000) + + +dirichlet_params = [{"alpha": 5}, {"alpha": 1}, {"alpha": [2, 5, 8]}] + + +class Dirichlet: + target_method = "dirichlet" + + def test_dirichlet(self): + alpha = self.alpha + if not isinstance(self.alpha, float): + alpha = cupy.array(self.alpha) + self.generate(alpha=alpha, size=(3, 2)) + + def test_dirichlet_int_shape(self): + alpha = self.alpha + if not isinstance(self.alpha, int): + alpha = cupy.array(self.alpha) + self.generate(alpha=alpha, size=5) + + # TODO(kataoka): add distribution test diff --git a/tests/third_party/cupy/random_tests/test_generator.py b/tests/third_party/cupy/random_tests/test_generator.py new file mode 100644 index 00000000000..f18912f6538 --- /dev/null +++ b/tests/third_party/cupy/random_tests/test_generator.py @@ -0,0 +1,1298 @@ +import 
functools +import os +import threading +import unittest + +import numpy +import pytest + +import dpnp as cupy + +# from cupy import cuda +# from cupy.cuda import runtime +# from cupy.random import _generator +from tests.third_party.cupy import testing +from tests.third_party.cupy.testing import _condition, _hypothesis + +from . import common_distributions + +pytest.skip("random.generator() is not supported yet", allow_module_level=True) + + +def numpy_cupy_equal_continuous_distribution(significance_level, name="xp"): + """Decorator that tests the distributions of NumPy samples and CuPy ones. + + Args: + significance_level (float): The test fails if p-value is lower than + this argument. + name(str): Argument name whose value is either + ``numpy`` or ``cupy`` module. + + Decorated test fixture is required to return samples from the same + distribution even if ``xp`` is ``numpy`` or ``cupy``. + + """ + + def decorator(impl): + @functools.wraps(impl) + def test_func(self, *args, **kw): + kw[name] = cupy + cupy_result = impl(self, *args, **kw) + + kw[name] = numpy + numpy_result = impl(self, *args, **kw) + + assert cupy_result is not None + assert numpy_result is not None + d_plus, d_minus, p_value = ( + common_distributions.two_sample_Kolmogorov_Smirnov_test( + cupy.asnumpy(cupy_result), numpy_result + ) + ) + if p_value < significance_level: + message = """Rejected null hypothesis: +p: %f +D+ (cupy < numpy): %f +D- (cupy > numpy): %f""" % ( + p_value, + d_plus, + d_minus, + ) + raise AssertionError(message) + + return test_func + + return decorator + + +def _get_size(size): + # CuPy returns an ndarray of shape () even if size=None. + # cf. NumPy returns a Python scalar if size=None. + if size is None: + return () + return cupy._core.get_size(size) + + +class RandomGeneratorTestCase(common_distributions.BaseGeneratorTestCase): + + target_method = None + + def get_rng(self, xp, seed): + return xp.random.RandomState(seed=seed) + + def set_rng_seed(self, seed): + self.rng.seed(seed) + + +def _xp_random(xp, method_name): + method = getattr(xp.random.RandomState(), method_name) + if xp == cupy: + return method + + def f(*args, **kwargs): + dtype = kwargs.pop("dtype", None) + ret = method(*args, **kwargs) + if dtype is not None: + ret = ret.astype(dtype, copy=False) + return ret + + return f + + +@testing.fix_random() +class TestRandomState(unittest.TestCase): + + def setUp(self): + self.rs = _generator.RandomState(seed=testing.generate_seed()) + + def check_seed(self, seed): + rs = self.rs + + rs.seed(seed) + xs1 = [rs.uniform() for _ in range(100)] + + rs.seed(seed) + xs2 = [rs.uniform() for _ in range(100)] + + rs.seed(seed) + rs.seed(None) + xs3 = [rs.uniform() for _ in range(100)] + + # Random state must be reproducible + assert xs1 == xs2 + # Random state must be initialized randomly with seed=None + assert xs1 != xs3 + + @testing.for_int_dtypes() + def test_seed_not_none(self, dtype): + self.check_seed(dtype(0)) + + @testing.for_dtypes([numpy.complex128]) + def test_seed_invalid_type_complex(self, dtype): + with self.assertRaises(TypeError): + self.rs.seed(dtype(0)) + + @testing.for_float_dtypes() + def test_seed_invalid_type_float(self, dtype): + with self.assertRaises(TypeError): + self.rs.seed(dtype(0)) + + def test_array_seed(self): + self.check_seed(numpy.random.randint(0, 2**31, size=40)) + + def test_methods(self): + methods = [ + cuda.curand.CURAND_RNG_PSEUDO_DEFAULT, + cuda.curand.CURAND_RNG_PSEUDO_MRG32K3A, + cupy.cuda.curand.CURAND_RNG_PSEUDO_MT19937, + 
cupy.cuda.curand.CURAND_RNG_PSEUDO_PHILOX4_32_10, + cupy.cuda.curand.CURAND_RNG_PSEUDO_MTGP32, + cupy.cuda.curand.CURAND_RNG_PSEUDO_XORWOW, + ] + + for method in methods: + if ( + runtime.is_hip + and method == cupy.cuda.curand.CURAND_RNG_PSEUDO_MT19937 + ): + # hipRAND fails for MT19937 with the status code 1000, + # HIPRAND_STATUS_NOT_IMPLEMENTED. We use `pytest.raises` here + # so that we will be able to find it once hipRAND implement + # MT19937 as the imperative `pytest.xfail` immediately rewinds + # the control flow and does not run the test. + with pytest.raises(KeyError) as e: + rs = cupy.random.RandomState(method=method) + assert e.value.args == (1000,) + continue + rs = cupy.random.RandomState(method=method) + rs.normal() + + +@testing.parameterize(*common_distributions.beta_params) +@testing.with_requires("numpy>=1.17.0") +@testing.fix_random() +class TestBeta(common_distributions.Beta, RandomGeneratorTestCase): + pass + + +@testing.parameterize( + {"n": 5, "p": 0.5}, + {"n": 5, "p": 0.0}, + {"n": 5, "p": 1.0}, +) +@testing.fix_random() +class TestBinomial(RandomGeneratorTestCase): + # TODO(niboshi): + # Test soundness of distribution. + # Currently only reprocibility is checked. + + target_method = "binomial" + + def test_binomial(self): + self.generate(n=self.n, p=self.p, size=(3, 2)) + + +@testing.parameterize(*common_distributions.chisquare_params) +@testing.fix_random() +class TestChisquare(common_distributions.Chisquare, RandomGeneratorTestCase): + pass + + +@testing.parameterize(*common_distributions.dirichlet_params) +@testing.fix_random() +class TestDirichlet(common_distributions.Dirichlet, RandomGeneratorTestCase): + pass + + +@testing.parameterize(*common_distributions.exponential_params) +@testing.fix_random() +class TestExponential( + common_distributions.Exponential, RandomGeneratorTestCase +): + pass + + +@testing.parameterize(*common_distributions.f_params) +@testing.fix_random() +class TestF(common_distributions.F, RandomGeneratorTestCase): + pass + + +@testing.parameterize(*common_distributions.gamma_params) +@testing.fix_random() +class TestGamma(common_distributions.Gamma, RandomGeneratorTestCase): + pass + + +@testing.parameterize(*common_distributions.geometric_params) +@testing.fix_random() +class TestGeometric(common_distributions.Geometric, RandomGeneratorTestCase): + pass + + +@testing.parameterize(*common_distributions.hypergeometric_params) +@testing.fix_random() +class TestHypergeometric( + common_distributions.Hypergeometric, RandomGeneratorTestCase +): + pass + + +@testing.fix_random() +class TestLaplace(RandomGeneratorTestCase): + + target_method = "laplace" + + def test_laplace_1(self): + self.generate() + + def test_laplace_2(self): + self.generate(0.0, 1.0, size=(3, 2)) + + @testing.for_dtypes("fd") + @_condition.repeat_with_success_at_least(10, 3) + def test_laplace_ks_1(self, dtype): + self.check_ks(0.05)(size=2000, dtype=dtype) + + @testing.for_dtypes("fd") + @_condition.repeat_with_success_at_least(10, 3) + def test_laplace_ks_2(self, dtype): + self.check_ks(0.05)(2.3, 4.5, size=2000, dtype=dtype) + + +@testing.fix_random() +class TestLogistic(RandomGeneratorTestCase): + + target_method = "logistic" + + def test_logistic_1(self): + self.generate() + + def test_logistic_2(self): + self.generate(0.0, 1.0, size=(3, 2)) + + @testing.slow + @_condition.repeat(10) + def test_standard_logistic_isfinite(self): + x = self.generate(size=10**7) + assert cupy.isfinite(x).all() + + @testing.for_dtypes("fd") + @_condition.repeat_with_success_at_least(10, 
3) + def test_logistic_ks_1(self, dtype): + self.check_ks(0.05)(size=2000, dtype=dtype) + + @testing.for_dtypes("fd") + @_condition.repeat_with_success_at_least(10, 3) + def test_logistic_ks_2(self, dtype): + self.check_ks(0.05)(2.3, 4.5, size=2000, dtype=dtype) + + +@testing.parameterize( + *[ + {"args": (0.0, 1.0), "size": None}, + {"args": (10.0, 20.0), "size": None}, + {"args": (0.0, 1.0), "size": 10}, + {"args": (0.0, 1.0), "size": (1, 2, 3)}, + {"args": (0.0, 1.0), "size": 3}, + {"args": (0.0, 1.0), "size": (3, 3)}, + {"args": (0.0, 1.0), "size": ()}, + ] +) +@testing.fix_random() +class TestLogNormal(RandomGeneratorTestCase): + + target_method = "lognormal" + + def check_lognormal(self, dtype): + vals = self.generate_many( + self.args[0], self.args[1], self.size, dtype, _count=10 + ) + + shape = _get_size(self.size) + for val in vals: + assert isinstance(val, cupy.ndarray) + assert val.dtype == dtype + assert val.shape == shape + assert (0 <= val).all() + + def test_lognormal_float(self): + self.check_lognormal(float) + + def test_lognormal_float32(self): + self.check_lognormal(numpy.float32) + + def test_lognormal_float64(self): + self.check_lognormal(numpy.float64) + + @testing.for_dtypes("fd") + @_condition.repeat_with_success_at_least(10, 3) + def test_lognormal_ks(self, dtype): + self.check_ks(0.05)(*self.args, size=self.size, dtype=dtype) + + +@testing.parameterize(*common_distributions.logseries_params) +@testing.fix_random() +class TestLogseries(common_distributions.Logseries, RandomGeneratorTestCase): + pass + + +@testing.parameterize( + *[ + { + "args": ([0.0, 0.0], [[1.0, 0.0], [0.0, 1.0]]), + "size": None, + "tol": 1e-6, + }, + { + "args": ([10.0, 10.0], [[20.0, 10.0], [10.0, 20.0]]), + "size": None, + "tol": 1e-6, + }, + { + "args": ([0.0, 0.0], [[1.0, 0.0], [0.0, 1.0]]), + "size": 10, + "tol": 1e-6, + }, + { + "args": ([0.0, 0.0], [[1.0, 0.0], [0.0, 1.0]]), + "size": (1, 2, 3), + "tol": 1e-6, + }, + { + "args": ([0.0, 0.0], [[1.0, 0.0], [0.0, 1.0]]), + "size": 3, + "tol": 1e-6, + }, + { + "args": ([0.0, 0.0], [[1.0, 0.0], [0.0, 1.0]]), + "size": (3, 3), + "tol": 1e-6, + }, + { + "args": ([0.0, 0.0], [[1.0, 0.0], [0.0, 1.0]]), + "size": (), + "tol": 1e-6, + }, + ] +) +@testing.fix_random() +class TestMultivariateNormal(RandomGeneratorTestCase): + + target_method = "multivariate_normal" + + def check_multivariate_normal(self, dtype): + vals = self.generate_many( + mean=self.args[0], + cov=self.args[1], + size=self.size, + tol=self.tol, + dtype=dtype, + _count=10, + ) + + shape = _get_size(self.size) + for val in vals: + assert isinstance(val, cupy.ndarray) + assert val.dtype == dtype + assert val.shape == shape + (2,) + + def test_multivariate_normal_float32(self): + self.check_multivariate_normal(numpy.float32) + + def test_multivariate_normal_float64(self): + self.check_multivariate_normal(numpy.float64) + + # TODO(kataoka): add distribution test + + +@testing.parameterize( + {"n": 5, "p": 0.5}, +) +@testing.fix_random() +class TestNegativeBinomial(RandomGeneratorTestCase): + target_method = "negative_binomial" + + def test_negative_binomial(self): + self.generate(n=self.n, p=self.p, size=(3, 2)) + + # TODO(kataoka): add distribution test + + +@testing.parameterize( + {"df": 1.5, "nonc": 2.0}, + {"df": 2.0, "nonc": 0.0}, +) +@testing.fix_random() +class TestNoncentralChisquare(RandomGeneratorTestCase): + + target_method = "noncentral_chisquare" + + def test_noncentral_chisquare(self): + self.generate(df=self.df, nonc=self.nonc, size=(3, 2)) + + 
@testing.for_dtypes("fd") + @_condition.repeat_with_success_at_least(10, 3) + def test_noncentral_chisquare_ks(self, dtype): + self.check_ks(0.05)(self.df, self.nonc, size=2000, dtype=dtype) + + +@testing.parameterize( + {"dfnum": 2.0, "dfden": 3.0, "nonc": 4.0}, + {"dfnum": 2.5, "dfden": 1.5, "nonc": 0.0}, +) +@testing.fix_random() +class TestNoncentralF(RandomGeneratorTestCase): + + target_method = "noncentral_f" + + def test_noncentral_f(self): + self.generate( + dfnum=self.dfnum, dfden=self.dfden, nonc=self.nonc, size=(3, 2) + ) + + @testing.for_dtypes("fd") + @_condition.repeat_with_success_at_least(10, 3) + def test_noncentral_f_ks(self, dtype): + self.check_ks(0.05)( + self.dfnum, self.dfden, self.nonc, size=2000, dtype=dtype + ) + + +@testing.parameterize( + *[ + {"args": (0.0, 1.0), "size": None}, + {"args": (10.0, 20.0), "size": None}, + {"args": (0.0, 1.0), "size": 10}, + {"args": (0.0, 1.0), "size": (1, 2, 3)}, + {"args": (0.0, 1.0), "size": 3}, + {"args": (0.0, 1.0), "size": (3, 3)}, + {"args": (0.0, 1.0), "size": ()}, + ] +) +@testing.fix_random() +class TestNormal(RandomGeneratorTestCase): + + target_method = "normal" + + def check_normal(self, dtype): + vals = self.generate_many( + self.args[0], self.args[1], self.size, dtype, _count=10 + ) + + shape = _get_size(self.size) + for val in vals: + assert isinstance(val, cupy.ndarray) + assert val.dtype == dtype + assert val.shape == shape + + def test_normal_float32(self): + self.check_normal(numpy.float32) + + def test_normal_float64(self): + self.check_normal(numpy.float64) + + @testing.for_dtypes("fd") + @_condition.repeat_with_success_at_least(10, 3) + def test_normal_ks(self, dtype): + self.check_ks(0.05)(*self.args, size=self.size, dtype=dtype) + + +@testing.parameterize( + {"a": 1.0}, + {"a": 3.0}, + {"a": 10.0}, +) +@testing.fix_random() +class TestPareto(RandomGeneratorTestCase): + + target_method = "pareto" + + def test_pareto(self): + self.generate(a=self.a, size=(3, 2)) + + @testing.for_dtypes("fd") + @_condition.repeat_with_success_at_least(10, 3) + def test_pareto_ks(self, dtype): + self.check_ks(0.05)(a=self.a, size=2000, dtype=dtype) + + +@testing.parameterize(*common_distributions.poisson_params) +@testing.fix_random() +class TestPoisson(common_distributions.Poisson, RandomGeneratorTestCase): + pass + + +@testing.parameterize( + {"df": 1.0}, + {"df": 3.0}, + {"df": 10.0}, +) +@testing.fix_random() +class TestStandardT(RandomGeneratorTestCase): + + target_method = "standard_t" + + def test_standard_t(self): + self.generate(df=self.df, size=(3, 2)) + + @testing.for_dtypes("fd") + @_condition.repeat_with_success_at_least(10, 3) + def test_standard_t_ks(self, dtype): + self.check_ks(0.05)(df=self.df, size=2000, dtype=dtype) + + +@testing.parameterize( + *[ + {"size": None}, + {"size": 10}, + {"size": (1, 2, 3)}, + {"size": 3}, + {"size": ()}, + ] +) +@testing.fix_random() +class TestRandomSample(unittest.TestCase): + + def setUp(self): + self.rs = _generator.RandomState(seed=testing.generate_seed()) + + def check_random_sample(self, dtype): + vals = [self.rs.random_sample(self.size, dtype) for _ in range(10)] + + shape = _get_size(self.size) + for val in vals: + assert isinstance(val, cupy.ndarray) + assert val.dtype == dtype + assert val.shape == shape + assert (0 <= val).all() + assert (val < 1).all() + + def test_random_sample_float32(self): + self.check_random_sample(numpy.float32) + + def test_random_sample_float64(self): + self.check_random_sample(numpy.float64) + + +@testing.fix_random() +class 
TestRandomSampleDistrib(unittest.TestCase): + + @testing.for_dtypes("fd") + @_condition.repeat_with_success_at_least(10, 3) + @numpy_cupy_equal_continuous_distribution(0.05) + def test_random_sample_ks(self, xp, dtype): + return _xp_random(xp, "random_sample")(size=2000, dtype=dtype) + + +@testing.fix_random() +class TestRandAndRandN(unittest.TestCase): + + def setUp(self): + self.rs = _generator.RandomState(seed=testing.generate_seed()) + + def test_rand_invalid_argument(self): + with self.assertRaises(TypeError): + self.rs.rand(1, 2, 3, unnecessary="unnecessary_argument") + + def test_randn_invalid_argument(self): + with self.assertRaises(TypeError): + self.rs.randn(1, 2, 3, unnecessary="unnecessary_argument") + + +@testing.parameterize(*common_distributions.power_params) +@testing.fix_random() +class TestPower(common_distributions.Power, RandomGeneratorTestCase): + pass + + +@testing.parameterize( + {"scale": 1.0}, + {"scale": 3.0}, +) +@testing.fix_random() +class TestRayleigh(RandomGeneratorTestCase): + + target_method = "rayleigh" + + def test_rayleigh(self): + self.generate(scale=self.scale, size=(3, 2)) + + @testing.for_dtypes("fd") + @_condition.repeat_with_success_at_least(10, 3) + def test_rayleigh_ks(self, dtype): + self.check_ks(0.05)(scale=self.scale, size=2000, dtype=dtype) + + +@testing.fix_random() +class TestStandardCauchy(RandomGeneratorTestCase): + + target_method = "standard_cauchy" + + def test_standard_cauchy(self): + self.generate(size=(3, 2)) + + @testing.slow + @_condition.repeat(10) + def test_standard_cauchy_isfinite(self): + x = self.generate(size=10**7) + assert cupy.isfinite(x).all() + + @testing.for_dtypes("fd") + @_condition.repeat_with_success_at_least(10, 3) + def test_standard_cauchy_ks(self, dtype): + self.check_ks(0.05)(size=2000, dtype=dtype) + + +@testing.parameterize(*common_distributions.standard_gamma_params) +@testing.fix_random() +class TestStandardGamma( + common_distributions.StandardGamma, RandomGeneratorTestCase +): + pass + + +@testing.fix_random() +class TestInterval(RandomGeneratorTestCase): + + target_method = "_interval" + + def test_zero(self): + shape = (2, 3) + vals = self.generate_many(0, shape, _count=10) + for val in vals: + assert isinstance(val, cupy.ndarray) + assert val.dtype.kind in "iu" + assert val.shape == shape + assert (val == 0).all() + + def test_shape_zero(self): + mx = 10 + vals = self.generate_many(mx, None, _count=10) + for val in vals: + assert isinstance(val, cupy.ndarray) + assert val.dtype.kind in "iu" + assert val.shape == () + assert (0 <= val).all() + assert (val <= mx).all() + # TODO(niboshi): Distribution test + + def test_shape_one_dim(self): + mx = 10 + size = 20 + vals = self.generate_many(mx, size, _count=10) + for val in vals: + assert isinstance(val, cupy.ndarray) + assert val.dtype.kind in "iu" + assert val.shape == (size,) + assert (0 <= val).all() + assert (val <= mx).all() + # TODO(niboshi): Distribution test + + def test_shape_multi_dim(self): + mx = 10 + shape = (1, 2) + vals = self.generate_many(mx, shape, _count=10) + for val in vals: + assert isinstance(val, cupy.ndarray) + assert val.dtype.kind in "iu" + assert val.shape == shape + assert (0 <= val).all() + assert (val <= mx).all() + # TODO(niboshi): Distribution test + + def test_bound_1(self): + vals = self.generate_many(10, (2, 3), _count=10) + for val in vals: + assert isinstance(val, cupy.ndarray) + assert val.dtype.kind in "iu" + assert val.shape == (2, 3) + assert (0 <= val).all() + assert (val <= 10).all() + + def 
test_bound_2(self): + vals = self.generate_many(2, None, _count=20) + for val in vals: + assert isinstance(val, cupy.ndarray) + assert val.dtype.kind in "iu" + assert val.shape == () + assert (0 <= val).all() + assert (val <= 2).all() + + @_condition.repeat(3, 10) + def test_goodness_of_fit(self): + mx = 5 + trial = 300 + vals = self.generate_many(mx, None, _count=trial) + vals = [val.get() for val in vals] + counts = numpy.histogram(vals, bins=numpy.arange(mx + 2))[0] + expected = numpy.array([float(trial) / (mx + 1)] * (mx + 1)) + assert _hypothesis.chi_square_test(counts, expected) + + @_condition.repeat(3) + def test_goodness_of_fit_2(self): + mx = 5 + vals = self.generate(mx, (5, 5)).get() + counts = numpy.histogram(vals, bins=numpy.arange(mx + 2))[0] + expected = numpy.array([float(vals.size) / (mx + 1)] * (mx + 1)) + assert _hypothesis.chi_square_test(counts, expected) + + +@testing.fix_random() +class TestTomaxint(RandomGeneratorTestCase): + + target_method = "tomaxint" + + def test_tomaxint_none(self): + x = self.generate() + assert x.shape == () + assert (0 <= x).all() + assert (x <= cupy.iinfo(cupy.int_).max).all() + + def test_tomaxint_int(self): + x = self.generate(3) + assert x.shape == (3,) + assert (0 <= x).all() + assert (x <= cupy.iinfo(cupy.int_).max).all() + + def test_tomaxint_tuple(self): + x = self.generate((2, 3)) + assert x.shape == (2, 3) + assert (0 <= x).all() + assert (x <= cupy.iinfo(cupy.int_).max).all() + + +@testing.parameterize( + {"a": 3, "size": 2, "p": None}, + {"a": 3, "size": 2, "p": [0.3, 0.3, 0.4]}, + {"a": 3, "size": (5, 5), "p": [0.3, 0.3, 0.4]}, + {"a": 3, "size": (5, 5), "p": numpy.array([0.3, 0.3, 0.4])}, + {"a": 3, "size": (), "p": None}, + {"a": numpy.array([0.0, 1.0, 2.0]), "size": 2, "p": [0.3, 0.3, 0.4]}, + {"a": 0, "size": 0, "p": None}, + {"a": numpy.array([]), "size": 0, "p": None}, +) +@testing.fix_random() +class TestChoice1(RandomGeneratorTestCase): + + target_method = "choice" + + def test_dtype_shape(self): + v = self.generate(a=self.a, size=self.size, p=self.p) + if isinstance(self.size, int): + expected_shape = (self.size,) + else: + expected_shape = self.size + if isinstance(self.a, numpy.ndarray): + expected_dtype = "float" + else: + expected_dtype = "int64" + assert v.dtype == expected_dtype + assert v.shape == expected_shape + + @_condition.repeat(3, 10) + def test_bound(self): + vals = self.generate_many(a=self.a, size=self.size, p=self.p, _count=20) + vals = [val.get() for val in vals] + size_ = self.size if isinstance(self.size, tuple) else (self.size,) + if size_ == (0,): + self.skipTest("no bound check for empty `random.choice`") + for val in vals: + assert val.shape == size_ + assert min(val.min() for val in vals) == 0 + assert max(val.max() for val in vals) == 2 + + +@testing.parameterize( + {"a": [0, 1, 2], "size": 2, "p": [0.3, 0.3, 0.4]}, +) +@testing.fix_random() +class TestChoice2(RandomGeneratorTestCase): + + target_method = "choice" + + def test_dtype_shape(self): + v = self.generate(a=self.a, size=self.size, p=self.p) + if isinstance(self.size, int): + expected_shape = (self.size,) + else: + expected_shape = self.size + if isinstance(self.a, numpy.ndarray): + expected_dtype = "float" + else: + expected_dtype = "int" + assert v.dtype == expected_dtype + assert v.shape == expected_shape + + @_condition.repeat(3, 10) + def test_bound(self): + vals = self.generate_many(a=self.a, size=self.size, p=self.p, _count=20) + vals = [val.get() for val in vals] + size_ = self.size if isinstance(self.size, tuple) else 
(self.size,) + for val in vals: + assert val.shape == size_ + assert min(val.min() for val in vals) == 0 + assert max(val.max() for val in vals) == 2 + + +@testing.fix_random() +class TestChoiceChi(RandomGeneratorTestCase): + + target_method = "choice" + + @_condition.repeat_with_success_at_least(10, 9) + def test_goodness_of_fit(self): + trial = 100 + vals = self.generate_many(3, 1, True, [0.3, 0.3, 0.4], _count=trial) + vals = [val.get() for val in vals] + counts = numpy.histogram(vals, bins=numpy.arange(4))[0] + expected = numpy.array([30, 30, 40]) + assert _hypothesis.chi_square_test(counts, expected) + + @_condition.repeat(3, 10) + def test_goodness_of_fit_2(self): + vals = self.generate(3, (5, 20), True, [0.3, 0.3, 0.4]).get() + counts = numpy.histogram(vals, bins=numpy.arange(4))[0] + expected = numpy.array([30, 30, 40]) + assert _hypothesis.chi_square_test(counts, expected) + + +@testing.fix_random() +class TestChoiceMultinomial(unittest.TestCase): + + @_condition.repeat(3, 10) + @testing.for_float_dtypes() + @testing.numpy_cupy_allclose(atol=0.02) + def test_choice_multinomial(self, xp, dtype): + p = xp.array([0.5, 0.25, 0.125, 0.125], dtype) + trial = 10000 + x = xp.random.choice(len(p), trial, p=p) + y = xp.bincount(x).astype("f") / trial + return y + + +@testing.parameterize( + {"a": 3.1, "size": 1, "p": [0.1, 0.1, 0.8]}, + {"a": None, "size": 1, "p": [0.1, 0.1, 0.8]}, + {"a": -3, "size": 1, "p": [0.1, 0.1, 0.8]}, + {"a": [[0, 1], [2, 3]], "size": 1, "p": [[0.1, 0.2], [0.3, 0.4]]}, + {"a": [[0, 1], [2, 3]], "size": 1, "p": [0.3, 0.7]}, + {"a": [], "size": 1, "p": [0.1, 0.1, 0.8]}, + {"a": 4, "size": 1, "p": [[0.1, 0.2], [0.3, 0.4]]}, + {"a": 2, "size": 1, "p": [0.1, 0.1, 0.8]}, + {"a": 3, "size": 1, "p": [-0.1, 0.3, 0.8]}, + {"a": 3, "size": 1, "p": [0.1, 0.1, 0.7]}, +) +@testing.fix_random() +class TestChoiceFailure(unittest.TestCase): + + def setUp(self): + self.rs = _generator.RandomState(seed=testing.generate_seed()) + + def test_choice_invalid_value(self): + with self.assertRaises(ValueError): + self.rs.choice(a=self.a, size=self.size, p=self.p) + + +@testing.parameterize( + {"a": 5, "size": 2}, + {"a": 5, "size": (2, 2)}, + {"a": 5, "size": ()}, + {"a": numpy.array([0.0, 2.0, 4.0]), "size": 2}, +) +@testing.fix_random() +class TestChoiceReplaceFalse(RandomGeneratorTestCase): + + target_method = "choice" + + def test_dtype_shape(self): + v = self.generate(a=self.a, size=self.size, replace=False) + if isinstance(self.size, int): + expected_shape = (self.size,) + else: + expected_shape = self.size + if isinstance(self.a, numpy.ndarray): + expected_dtype = "float" + else: + expected_dtype = "int" + assert v.dtype == expected_dtype + assert v.shape == expected_shape + + @_condition.repeat(3, 10) + def test_bound(self): + val = self.generate(a=self.a, size=self.size, replace=False).get() + size = self.size if isinstance(self.size, tuple) else (self.size,) + assert val.shape == size + assert (0 <= val).all() + assert (val < 5).all() + val = numpy.asarray(val) + assert numpy.unique(val).size == val.size + + +@testing.fix_random() +class TestGumbel(RandomGeneratorTestCase): + + target_method = "gumbel" + + def test_gumbel_1(self): + self.generate() + + def test_gumbel_2(self): + self.generate(0.0, 1.0, size=(3, 2)) + + @testing.for_dtypes("fd") + @_condition.repeat_with_success_at_least(10, 3) + def test_gumbel_ks_1(self, dtype): + self.check_ks(0.05)(size=2000, dtype=dtype) + + @testing.for_dtypes("fd") + @_condition.repeat_with_success_at_least(10, 3) + def 
test_gumbel_ks_2(self, dtype): + self.check_ks(0.05)(2.3, 4.5, size=2000, dtype=dtype) + + +@testing.fix_random() +class TestRandint(RandomGeneratorTestCase): + # TODO(niboshi): + # Test soundness of distribution. + # Currently only reprocibility is checked. + + target_method = "randint" + + def test_randint_1(self): + self.generate(3) + + def test_randint_2(self): + self.generate(3, 4, size=(3, 2)) + + def test_randint_empty1(self): + self.generate(3, 10, size=0) + + def test_randint_empty2(self): + self.generate(3, size=(4, 0, 5)) + + def test_randint_overflow(self): + self.generate(numpy.int8(-100), numpy.int8(100)) + + def test_randint_float1(self): + self.generate(-1.2, 3.4, 5) + + def test_randint_float2(self): + self.generate(6.7, size=(2, 3)) + + @pytest.mark.xfail( + numpy.__version__ < "2", + reason="XXX: np 2.0: comparisons with OOB " + "ints are broken in numpy < 2", + ) + def test_randint_int64_1(self): + self.generate(2**34, 2**40, 3, dtype="q") + + def test_randint_array(self): + self.generate([[[-1], [0]], [[-2], [1]], [[3], [4]]], [[10, 11, 12]]) + + +@testing.fix_random() +class TestUniform(RandomGeneratorTestCase): + + target_method = "uniform" + + def test_uniform_1(self): + self.generate() + + def test_uniform_2(self): + self.generate(-4.2, 2.4, size=(3, 2)) + + def test_uniform_broadcast(self): + self.generate([[2, 3]], [4]) + + @testing.for_dtypes("fd") + @_condition.repeat_with_success_at_least(10, 3) + def test_uniform_ks_1(self, dtype): + self.check_ks(0.05)(size=2000, dtype=dtype) + + @testing.for_dtypes("fd") + @_condition.repeat_with_success_at_least(10, 3) + def test_uniform_ks_2(self, dtype): + self.check_ks(0.05)(-4.2, 2.4, size=2000, dtype=dtype) + + +@testing.parameterize( + {"mu": 0.0, "kappa": 1.0}, + {"mu": 3.0, "kappa": 3.0}, + {"mu": 3.0, "kappa": 1.0}, +) +@testing.fix_random() +class TestVonmises(RandomGeneratorTestCase): + + target_method = "vonmises" + + def test_vonmises(self): + self.generate(mu=self.mu, kappa=self.kappa, size=(3, 2)) + + @testing.for_dtypes("fd") + @_condition.repeat_with_success_at_least(10, 3) + def test_vonmises_ks(self, dtype): + self.check_ks(0.05)(self.mu, self.kappa, size=2000, dtype=dtype) + + +@testing.parameterize( + {"mean": 1.0, "scale": 3.0}, + {"mean": 3.0, "scale": 3.0}, + {"mean": 3.0, "scale": 1.0}, +) +@testing.fix_random() +class TestWald(RandomGeneratorTestCase): + + target_method = "wald" + + def test_wald(self): + self.generate(mean=self.mean, scale=self.scale, size=(3, 2)) + + @testing.for_dtypes("fd") + @_condition.repeat_with_success_at_least(10, 3) + def test_wald_ks(self, dtype): + self.check_ks(0.05)(self.mean, self.scale, size=2000, dtype=dtype) + + +@testing.parameterize( + {"a": 0.5}, + {"a": 1.0}, + {"a": 3.0}, + {"a": numpy.inf}, +) +@testing.fix_random() +class TestWeibull(RandomGeneratorTestCase): + + target_method = "weibull" + + def test_weibull(self): + self.generate(a=self.a, size=(3, 2)) + + def test_weibull_size_none(self): + self.generate([[0.5, 1.0, 3.0]], size=None) + + @testing.for_dtypes("fd") + @_condition.repeat_with_success_at_least(10, 3) + def test_weibull_ks(self, dtype): + self.check_ks(0.05)(a=self.a, size=2000, dtype=dtype) + + +@testing.parameterize( + {"a": 2.0}, +) +@testing.fix_random() +class TestZipf(RandomGeneratorTestCase): + + target_method = "zipf" + + def test_zipf(self): + self.generate(a=self.a, size=(3, 2)) + + # TODO(kataoka): add distribution test + + +@testing.parameterize( + {"a": 3, "size": 5}, + {"a": [1, 2, 3], "size": 5}, +) +@testing.fix_random() 
+class TestChoiceReplaceFalseFailure(unittest.TestCase): + + def test_choice_invalid_value(self): + for xp in (numpy, cupy): + rs = xp.random.RandomState(seed=testing.generate_seed()) + with pytest.raises(ValueError): + rs.choice(a=self.a, size=self.size, replace=False) + + +class TestResetStates(unittest.TestCase): + + def test_reset_states(self): + _generator._random_states = "dummy" + _generator.reset_states() + assert {} == _generator._random_states + + +class TestGetRandomState(unittest.TestCase): + + def setUp(self): + self.device_id = cuda.Device().id + self.rs_tmp = _generator._random_states + + def tearDown(self, *args): + _generator._random_states = self.rs_tmp + + def test_get_random_state_initialize(self): + _generator._random_states = {} + rs = _generator.get_random_state() + assert _generator._random_states[self.device_id] == rs + + def test_get_random_state_memoized(self): + _generator._random_states = { + self.device_id: "expected", + self.device_id + 1: "dummy", + } + rs = _generator.get_random_state() + assert "expected" == _generator._random_states[self.device_id] + assert "dummy" == _generator._random_states[self.device_id + 1] + assert "expected" == rs + + +class TestSetRandomState(unittest.TestCase): + + def setUp(self): + self.rs_tmp = _generator._random_states + + def tearDown(self, *args): + _generator._random_states = self.rs_tmp + + def test_set_random_state(self): + rs = _generator.RandomState() + _generator.set_random_state(rs) + assert _generator.get_random_state() is rs + + def test_set_random_state_call_multiple_times(self): + _generator.set_random_state(_generator.RandomState()) + rs = _generator.RandomState() + _generator.set_random_state(rs) + assert _generator.get_random_state() is rs + + +@testing.fix_random() +class TestStandardExponential( + common_distributions.StandardExponential, RandomGeneratorTestCase +): + pass + + +@testing.parameterize( + {"left": -1.0, "mode": 0.0, "right": 2.0}, +) +@testing.fix_random() +class TestTriangular(RandomGeneratorTestCase): + + target_method = "triangular" + + def test_triangular(self): + self.generate( + left=self.left, mode=self.mode, right=self.right, size=(3, 2) + ) + + +class TestRandomStateThreadSafe(unittest.TestCase): + + def setUp(self): + cupy.random.reset_states() + + def test_get_random_state_thread_safe(self): + def _f(func, args=()): + cupy.cuda.Device().use() + func(*args) + + seed = 10 + threads = [ + threading.Thread(target=_f, args=(cupy.random.seed, (seed,))), + threading.Thread(target=_f, args=(cupy.random.get_random_state,)), + threading.Thread(target=_f, args=(cupy.random.get_random_state,)), + threading.Thread(target=_f, args=(cupy.random.get_random_state,)), + threading.Thread(target=_f, args=(cupy.random.get_random_state,)), + threading.Thread(target=_f, args=(cupy.random.get_random_state,)), + threading.Thread(target=_f, args=(cupy.random.get_random_state,)), + ] + + for t in threads: + t.start() + for t in threads: + t.join() + + actual = cupy.random.uniform() + cupy.random.seed(seed) + expected = cupy.random.uniform() + assert actual == expected + + def test_set_random_state_thread_safe(self): + def _f(func, args=()): + cupy.cuda.Device().use() + func(*args) + + rs = cupy.random.RandomState() + threads = [ + threading.Thread( + target=_f, args=(cupy.random.set_random_state, (rs,)) + ), + threading.Thread( + target=_f, args=(cupy.random.set_random_state, (rs,)) + ), + ] + + for t in threads: + t.start() + for t in threads: + t.join() + + assert cupy.random.get_random_state() is rs + + 
+class TestGetRandomState2(unittest.TestCase): + + def setUp(self): + self.rs_dict = _generator._random_states + _generator._random_states = {} + self.cupy_seed = os.getenv("CUPY_SEED") + + def tearDown(self, *args): + _generator._random_states = self.rs_dict + if self.cupy_seed is None: + os.environ.pop("CUPY_SEED", None) + else: + os.environ["CUPY_SEED"] = self.cupy_seed + + def test_get_random_state_no_cupy(self): + os.environ.pop("CUPY_SEED", None) + rvs0 = self._get_rvs_reset() + rvs1 = self._get_rvs_reset() + + self._check_different(rvs0, rvs1) + + def test_get_random_state_with_cupy(self): + rvs0 = self._get_rvs(_generator.RandomState(6)) + + os.environ["CUPY_SEED"] = "6" + rvs1 = self._get_rvs_reset() + + self._check_same(rvs0, rvs1) + + def _get_rvs(self, rs): + rvu = rs.rand(4) + rvn = rs.randn(4) + return rvu, rvn + + def _get_rvs_reset(self): + _generator.reset_states() + return self._get_rvs(_generator.get_random_state()) + + def _check_same(self, rvs0, rvs1): + for rv0, rv1 in zip(rvs0, rvs1): + testing.assert_array_equal(rv0, rv1) + + def _check_different(self, rvs0, rvs1): + for rv0, rv1 in zip(rvs0, rvs1): + for r0, r1 in zip(rv0, rv1): + assert r0 != r1 + + +class TestCheckAndGetDtype(unittest.TestCase): + + @testing.for_float_dtypes(no_float16=True) + def test_float32_64_type(self, dtype): + assert _generator._check_and_get_dtype(dtype) == numpy.dtype(dtype) + + def test_float16(self): + with self.assertRaises(TypeError): + _generator._check_and_get_dtype(numpy.float16) + + @testing.for_int_dtypes() + def test_int_type(self, dtype): + with self.assertRaises(TypeError): + _generator._check_and_get_dtype(dtype) diff --git a/tests/third_party/cupy/random_tests/test_generator_api.py b/tests/third_party/cupy/random_tests/test_generator_api.py new file mode 100644 index 00000000000..7db45bfeaf8 --- /dev/null +++ b/tests/third_party/cupy/random_tests/test_generator_api.py @@ -0,0 +1,338 @@ +import threading +import unittest + +import numpy +import pytest + +import dpnp as cupy +from dpnp import random +from tests.third_party.cupy import testing +from tests.third_party.cupy.testing import _condition + +from . 
import common_distributions + +pytest.skip("random.Generator() is not supported yet", allow_module_level=True) + + +class GeneratorTestCase(common_distributions.BaseGeneratorTestCase): + + target_method = None + + def get_rng(self, xp, seed): + if xp is cupy: + return cupy.random.Generator( + random._bit_generator.Philox4x3210(seed=seed) + ) + else: + return numpy.random.Generator(numpy.random.MT19937(seed)) + + def set_rng_seed(self, seed): + self.rng.bit_generator = random._bit_generator.Philox4x3210(seed=seed) + + +class InvalidOutsMixin: + + def invalid_dtype_out(self, **kwargs): + out = cupy.zeros((3, 2), dtype=cupy.float32) + with pytest.raises(TypeError): + self.generate(size=(3, 2), out=out, **kwargs) + + def invalid_contiguity(self, **kwargs): + out = cupy.zeros((4, 6), dtype=cupy.float64)[0:3:, 0:2:] + with pytest.raises(ValueError): + self.generate(size=(3, 2), out=out, **kwargs) + + def invalid_shape(self, **kwargs): + out = cupy.zeros((3, 3), dtype=cupy.float64) + with pytest.raises(ValueError): + self.generate(size=(3, 2), out=out, **kwargs) + + def test_invalid_dtype_out(self): + self.invalid_dtype_out() + + def test_invalid_contiguity(self): + self.invalid_contiguity() + + def test_invalid_shape(self): + self.invalid_shape() + + +@testing.parameterize(*common_distributions.uniform_params) +@testing.with_requires("numpy>=1.17.0") +@testing.fix_random() +class TestUniform(common_distributions.Uniform, GeneratorTestCase): + pass + + +@testing.parameterize(*common_distributions.exponential_params) +@testing.with_requires("numpy>=1.17.0") +@testing.fix_random() +class TestExponential(common_distributions.Exponential, GeneratorTestCase): + pass + + +@testing.parameterize(*common_distributions.poisson_params) +@testing.with_requires("numpy>=1.17.0") +@testing.fix_random() +class TestPoisson(common_distributions.Poisson, GeneratorTestCase): + pass + + +@testing.parameterize(*common_distributions.binomial_params) +@testing.with_requires("numpy>=1.17.0") +@testing.fix_random() +class TestBinomial(common_distributions.Binomial, GeneratorTestCase): + pass + + +@testing.parameterize(*common_distributions.beta_params) +@testing.with_requires("numpy>=1.17.0") +@testing.fix_random() +class TestBeta(common_distributions.Beta, GeneratorTestCase): + pass + + +@testing.with_requires("numpy>=1.17.0") +@testing.fix_random() +class TestStandardExponential( + InvalidOutsMixin, + common_distributions.StandardExponential, + GeneratorTestCase, +): + pass + + +@testing.parameterize(*common_distributions.gamma_params) +@testing.fix_random() +class TestGamma( + common_distributions.Gamma, + GeneratorTestCase, +): + pass + + +@testing.parameterize(*common_distributions.standard_gamma_params) +@testing.fix_random() +class TestStandardGamma( + common_distributions.StandardGamma, + GeneratorTestCase, +): + pass + + +@testing.fix_random() +class TestStandardGammaInvalid(InvalidOutsMixin, GeneratorTestCase): + + target_method = "standard_gamma" + + def test_invalid_dtype_out(self): + self.invalid_dtype_out(shape=1.0) + + def test_invalid_contiguity(self): + self.invalid_contiguity(shape=1.0) + + out = cupy.zeros((4, 6), order="F", dtype=cupy.float64) + with pytest.raises(ValueError): + self.generate(size=(4, 6), out=out, shape=1.0) + + def test_invalid_shape(self): + self.invalid_shape(shape=1.0) + + def test_invalid_dtypes(self): + for dtype in "bhiqleFD": + with pytest.raises(TypeError): + self.generate(size=(3, 2), shape=1.0, dtype=dtype) + + +@testing.fix_random() +class 
TestStandardGammaEmpty(GeneratorTestCase): + + target_method = "standard_gamma" + + def test_empty_shape(self): + y = self.generate(shape=cupy.empty((1, 0))) + assert y.shape == (1, 0) + + def test_empty_size(self): + y = self.generate(1.0, size=(1, 0)) + assert y.shape == (1, 0) + + def test_empty_out(self): + out = cupy.empty((1, 0)) + y = self.generate(cupy.empty((1, 0)), out=out) + assert y is out + assert y.shape == (1, 0) + + +@testing.with_requires("numpy>=1.17.0") +@testing.parameterize(*common_distributions.standard_normal_params) +@testing.fix_random() +class TestStandardNormal( + common_distributions.StandardNormal, GeneratorTestCase +): + pass + + +@testing.with_requires("numpy>=1.17.0") +@testing.fix_random() +class TestStandardNormalInvalid(InvalidOutsMixin, GeneratorTestCase): + + target_method = "standard_normal" + + def test_invalid_dtypes(self): + for dtype in "bhiqleFD": + with pytest.raises(TypeError): + self.generate(size=(3, 2), dtype=dtype) + + +@testing.with_requires("numpy>=1.17.0") +@testing.fix_random() +class TestIntegers(GeneratorTestCase): + target_method = "integers" + + def test_integers_1(self): + self.generate(3) + + def test_integers_2(self): + self.generate(3, 4, size=(3, 2)) + + def test_integers_empty1(self): + self.generate(3, 10, size=0) + + def test_integers_empty2(self): + self.generate(3, size=(4, 0, 5)) + + def test_integers_overflow(self): + self.generate(numpy.int8(-100), numpy.int8(100)) + + def test_integers_float1(self): + self.generate(-1.2, 3.4, 5) + + def test_integers_float2(self): + self.generate(6.7, size=(2, 3)) + + def test_integers_int64_1(self): + self.generate(2**34, 2**40, 3) + + @_condition.repeat_with_success_at_least(10, 3) + def test_integers_ks(self): + self.check_ks(0.05)(low=100, high=1000, size=2000) + + @_condition.repeat_with_success_at_least(10, 3) + def test_integers_ks_low(self): + self.check_ks(0.05)(low=100, size=2000) + + @_condition.repeat_with_success_at_least(10, 3) + def test_integers_ks_large(self): + self.check_ks(0.05)(low=2**34, high=2**40, size=2000) + + @_condition.repeat_with_success_at_least(10, 3) + def test_integers_ks_large2(self): + self.check_ks(0.05)(2**40, size=2000) + + +@testing.with_requires("numpy>=1.17.0") +@testing.fix_random() +class TestRandom(InvalidOutsMixin, GeneratorTestCase): + # TODO(niboshi): + # Test soundness of distribution. + # Currently only reprocibility is checked. 
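+    # (Reproducibility here means the base class draws the sample twice with
+    # the same seed via _generate_check_repro() and asserts both arrays are
+    # equal.)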
+ + target_method = "random" + + def test_random(self): + self.generate(3) + + @testing.for_dtypes("fd") + @_condition.repeat_with_success_at_least(10, 3) + def test_random_ks(self, dtype): + self.check_ks(0.05)(size=2000, dtype=dtype) + + +@testing.parameterize(*common_distributions.geometric_params) +@testing.with_requires("numpy>=1.17.0") +@testing.fix_random() +class TestGeometric(common_distributions.Geometric, GeneratorTestCase): + pass + + +@testing.parameterize(*common_distributions.hypergeometric_params) +@testing.with_requires("numpy>=1.17.0") +@testing.fix_random() +class TestHypergeometric( + common_distributions.Hypergeometric, GeneratorTestCase +): + pass + + +@testing.parameterize(*common_distributions.power_params) +@testing.fix_random() +class TestPower(common_distributions.Power, GeneratorTestCase): + pass + + +@testing.with_requires("numpy>=1.17.0") +@pytest.mark.skipif( + cupy.cuda.runtime.is_hip + and (int(str(cupy.cuda.runtime.runtimeGetVersion())[:3]) < 403), + reason="HIP<4.3 not supported ", +) +class TestRandomStateThreadSafe(unittest.TestCase): + + def test_default_rng_thread_safe(self): + def _f(func, args=()): + cupy.cuda.Device().use() + func(*args) + + seed = 10 + threads = [ + threading.Thread( + target=_f, args=(cupy.random.default_rng, (seed,)) + ), + threading.Thread(target=_f, args=(cupy.random.default_rng)), + threading.Thread(target=_f, args=(cupy.random.default_rng)), + threading.Thread(target=_f, args=(cupy.random.default_rng)), + threading.Thread(target=_f, args=(cupy.random.default_rng)), + threading.Thread(target=_f, args=(cupy.random.default_rng)), + threading.Thread(target=_f, args=(cupy.random.default_rng)), + ] + + for t in threads: + t.start() + for t in threads: + t.join() + + actual = cupy.random.default_rng(seed).standard_exponential() + expected = cupy.random.default_rng(seed).standard_exponential() + assert actual == expected + + +@testing.parameterize(*common_distributions.logseries_params) +@testing.fix_random() +class TestLogseries(common_distributions.Logseries, GeneratorTestCase): + pass + + +@testing.parameterize(*common_distributions.chisquare_params) +@testing.fix_random() +class TestChisquare(common_distributions.Chisquare, GeneratorTestCase): + pass + + +@testing.parameterize(*common_distributions.f_params) +@testing.fix_random() +class TestF(common_distributions.F, GeneratorTestCase): + pass + + +@testing.parameterize(*common_distributions.dirichlet_params) +@testing.fix_random() +class TestDrichlet(common_distributions.Dirichlet, GeneratorTestCase): + pass + + +@testing.slow +class TestLarge: + def test_large(self): + gen = random.Generator(random.XORWOW(1234)) + gen.random(2**31 + 1, dtype=cupy.int8) From 3fb0a8a34799ec0cea33dead906bd17f8a4869f8 Mon Sep 17 00:00:00 2001 From: Anton Volkov Date: Fri, 15 Nov 2024 14:51:43 +0100 Subject: [PATCH 25/29] Add test_bit_generator.py --- .../cupy/random_tests/test_bit_generator.py | 80 +++++++++++++++++++ 1 file changed, 80 insertions(+) create mode 100644 tests/third_party/cupy/random_tests/test_bit_generator.py diff --git a/tests/third_party/cupy/random_tests/test_bit_generator.py b/tests/third_party/cupy/random_tests/test_bit_generator.py new file mode 100644 index 00000000000..3c900683bb3 --- /dev/null +++ b/tests/third_party/cupy/random_tests/test_bit_generator.py @@ -0,0 +1,80 @@ +import unittest + +import numpy +import pytest + +import dpnp as cupy +from dpnp import random +from tests.third_party.cupy import testing + +pytest.skip("bit generator is not supported yet", 
allow_module_level=True) + + +class BitGeneratorTestCase: + + def setUp(self): + self.seed = testing.generate_seed() + + def check_seed(self, seed): + bg1 = self.bg(seed) + bg2 = self.bg(seed) + bg3 = self.bg(None) + + xs1 = bg1.random_raw(10) + xs2 = bg2.random_raw(10) + xs3 = bg3.random_raw(10) + + # Random state must be reproducible + assert cupy.array_equal(xs1, xs2) + # Random state must be initialized randomly with seed=None + assert not cupy.array_equal(xs1, xs3) + + @testing.for_int_dtypes(no_bool=True) + def test_seed_not_none(self, dtype): + self.check_seed(dtype(0)) + + @testing.for_dtypes([numpy.complex128]) + def test_seed_invalid_type_complex(self, dtype): + with self.assertRaises(TypeError): + self.bg(dtype(0)) + + @testing.for_float_dtypes() + def test_seed_invalid_type_float(self, dtype): + with self.assertRaises(TypeError): + self.bg(dtype(0)) + + def test_array_seed(self): + self.check_seed(numpy.random.randint(0, 2**31, size=10)) + + +@testing.with_requires("numpy>=1.17.0") +@testing.fix_random() +@pytest.mark.skipif( + cupy.cuda.runtime.is_hip, reason="HIP does not support this" +) +class TestBitGeneratorXORWOW(BitGeneratorTestCase, unittest.TestCase): + def setUp(self): + super().setUp() + self.bg = random._bit_generator.XORWOW + + +@testing.with_requires("numpy>=1.17.0") +@testing.fix_random() +@pytest.mark.skipif( + cupy.cuda.runtime.is_hip, reason="HIP does not support this" +) +class TestBitGeneratorMRG32k3a(BitGeneratorTestCase, unittest.TestCase): + def setUp(self): + super().setUp() + self.bg = random._bit_generator.MRG32k3a + + +@testing.with_requires("numpy>=1.17.0") +@testing.fix_random() +@pytest.mark.skipif( + cupy.cuda.runtime.is_hip, reason="HIP does not support this" +) +class TestBitGeneratorPhilox4x3210(BitGeneratorTestCase, unittest.TestCase): + def setUp(self): + super().setUp() + self.bg = random._bit_generator.Philox4x3210 From ef68961f404e2e335c485a2dad3364464d50bb9f Mon Sep 17 00:00:00 2001 From: Anton Volkov Date: Fri, 15 Nov 2024 16:21:16 +0100 Subject: [PATCH 26/29] Update random_tests/test_distributions.py --- .../cupy/random_tests/test_distributions.py | 519 ++++++++++++------ tests/third_party/cupy/testing/_loops.py | 6 +- 2 files changed, 349 insertions(+), 176 deletions(-) diff --git a/tests/third_party/cupy/random_tests/test_distributions.py b/tests/third_party/cupy/random_tests/test_distributions.py index d73fa996139..049113b3f76 100644 --- a/tests/third_party/cupy/random_tests/test_distributions.py +++ b/tests/third_party/cupy/random_tests/test_distributions.py @@ -1,32 +1,48 @@ -import unittest - import numpy import pytest import dpnp as cupy from dpnp import random as _distributions +from tests.helper import has_support_aspect64 from tests.third_party.cupy import testing -from tests.third_party.cupy.testing import _helper, _loops -_regular_float_dtypes = (numpy.float64, numpy.float32) +if has_support_aspect64(): + _regular_float_dtypes = (numpy.float64, numpy.float32) +else: + _regular_float_dtypes = (numpy.float32,) _float_dtypes = _regular_float_dtypes + (numpy.float16,) _signed_dtypes = tuple(numpy.dtype(i).type for i in "bhilq") _unsigned_dtypes = tuple(numpy.dtype(i).type for i in "BHILQ") _int_dtypes = _signed_dtypes + _unsigned_dtypes -class RandomDistributionsTestCase(unittest.TestCase): - def check_distribution(self, dist_name, params): +class RandomDistributionsTestCase: + def check_distribution(self, dist_name, params, dtype=None): cp_params = {k: cupy.asarray(params[k]) for k in params} np_out = numpy.asarray( - 
getattr(numpy.random, dist_name)(size=self.shape, **params) + getattr(numpy.random, dist_name)(size=self.shape, **params), dtype ) + dt_kward = {dtype: dtype} if dtype else {} cp_out = getattr(_distributions, dist_name)( - size=self.shape, **cp_params + size=self.shape, **dt_kward, **cp_params ) - - self.assertEqual(cp_out.shape, np_out.shape) - self.assertEqual(cp_out.dtype, np_out.dtype) + if np_out.ndim > 0: + assert cp_out.shape == np_out.shape + if np_out.dtype == numpy.float64 and has_support_aspect64(): + assert cp_out.dtype == np_out.dtype + else: + assert cp_out.dtype.kind == np_out.dtype.kind + + def check_generator_distribution(self, dist_name, params, dtype): + cp_params = {k: cupy.asarray(params[k]) for k in params} + np_gen = numpy.random.default_rng(0) + cp_gen = cupy.random.default_rng(0) + np_out = numpy.asarray( + getattr(np_gen, dist_name)(size=self.shape, **params) + ) + cp_out = getattr(cp_gen, dist_name)(size=self.shape, **cp_params) + assert cp_out.shape == np_out.shape + assert cp_out.dtype == np_out.dtype @testing.parameterize( @@ -35,18 +51,19 @@ def check_distribution(self, dist_name, params): "shape": [(4, 3, 2), (3, 2)], "a_shape": [(), (3, 2)], "b_shape": [(), (3, 2)], + # "dtype": _float_dtypes, # to escape timeout + "dtype": [None], # no dtype supported } ) ) @pytest.mark.usefixtures("allow_fall_back_on_numpy") class TestDistributionsBeta(RandomDistributionsTestCase): - @_loops.for_dtypes_combination( - _regular_float_dtypes, names=["a_dtype", "b_dtype"] - ) + + @testing.for_dtypes_combination(_float_dtypes, names=["a_dtype", "b_dtype"]) def test_beta(self, a_dtype, b_dtype): a = numpy.full(self.a_shape, 3, dtype=a_dtype) b = numpy.full(self.b_shape, 3, dtype=b_dtype) - self.check_distribution("beta", {"a": a, "b": b}) + self.check_distribution("beta", {"a": a, "b": b}, self.dtype) @testing.parameterize( @@ -60,8 +77,9 @@ def test_beta(self, a_dtype, b_dtype): ) ) class TestDistributionsBinomial(RandomDistributionsTestCase): - @_loops.for_signed_dtypes("n_dtype") - @_loops.for_float_dtypes("p_dtype") + + @testing.for_signed_dtypes("n_dtype") + @testing.for_float_dtypes("p_dtype") def test_binomial(self, n_dtype, p_dtype): if numpy.dtype("l") == numpy.int32 and n_dtype == numpy.int64: pytest.skip("n must be able to cast to long") @@ -79,15 +97,16 @@ def test_binomial(self, n_dtype, p_dtype): ) ) @pytest.mark.usefixtures("allow_fall_back_on_numpy") -class TestDistributionsChisquare(unittest.TestCase): +class TestDistributionsChisquare: + def check_distribution(self, dist_func, df_dtype): df = cupy.full(self.df_shape, 5, dtype=df_dtype) out = dist_func(df, self.shape) - self.assertEqual(self.shape, out.shape) - # numpy and dpdp output dtype is float64 - self.assertEqual(out.dtype, numpy.float64) + assert self.shape == out.shape + # assert out.dtype == dtype - @_loops.for_float_dtypes("df_dtype") + @testing.for_float_dtypes("df_dtype") + # @testing.for_float_dtypes("dtype") # dtype is not supported def test_chisquare(self, df_dtype): self.check_distribution(_distributions.chisquare, df_dtype) @@ -102,7 +121,10 @@ def test_chisquare(self, df_dtype): ) @pytest.mark.usefixtures("allow_fall_back_on_numpy") class TestDistributionsDirichlet(RandomDistributionsTestCase): - @_loops.for_dtypes_combination(_regular_float_dtypes, names=["alpha_dtype"]) + + @testing.for_dtypes_combination( + _float_dtypes, names=["alpha_dtype"] # dtype is not supported + ) def test_dirichlet(self, alpha_dtype): alpha = numpy.ones(self.alpha_shape, dtype=alpha_dtype) 
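# --- illustrative aside (not part of the patch) ----------------------------
# The reworked RandomDistributionsTestCase.check_distribution above requires
# an exact dtype match only when the device supports float64 and otherwise
# compares dtype kinds. A minimal standalone sketch of that check, assuming
# dpnp and the repository's tests.helper module are importable:
import numpy
import dpnp
from tests.helper import has_support_aspect64

np_out = numpy.random.beta(3.0, 3.0, size=(3, 2))
dp_out = dpnp.random.beta(3.0, 3.0, size=(3, 2))

assert dp_out.shape == np_out.shape
if np_out.dtype == numpy.float64 and has_support_aspect64():
    assert dp_out.dtype == np_out.dtype            # exact match on fp64 devices
else:
    assert dp_out.dtype.kind == np_out.dtype.kind  # both floating-point kinds
# ----------------------------------------------------------------------------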
self.check_distribution("dirichlet", {"alpha": alpha}) @@ -118,7 +140,9 @@ def test_dirichlet(self, alpha_dtype): ) @pytest.mark.usefixtures("allow_fall_back_on_numpy") class TestDistributionsExponential(RandomDistributionsTestCase): - @_loops.for_float_dtypes("scale_dtype") + + # @testing.for_float_dtypes("dtype", no_float16=True) # dtype is not supported + @testing.for_float_dtypes("scale_dtype") def test_exponential(self, scale_dtype): scale = numpy.ones(self.scale_shape, dtype=scale_dtype) self.check_distribution("exponential", {"scale": scale}) @@ -126,9 +150,10 @@ def test_exponential(self, scale_dtype): @pytest.mark.usefixtures("allow_fall_back_on_numpy") class TestDistributionsExponentialError(RandomDistributionsTestCase): + def test_negative_scale(self): scale = cupy.array([2, -1, 3], dtype=numpy.float32) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): cupy.random.exponential(scale) @@ -138,49 +163,75 @@ def test_negative_scale(self): "shape": [(4, 3, 2), (3, 2)], "dfnum_shape": [(), (3, 2)], "dfden_shape": [(), (3, 2)], + # "dtype": _float_dtypes, # to escape timeout + "dtype": [None], # no dtype supported } ) ) @pytest.mark.usefixtures("allow_fall_back_on_numpy") -class TestDistributionsF(unittest.TestCase): - def check_distribution(self, dist_func, dfnum_dtype, dfden_dtype): +class TestDistributionsF: + + def check_distribution(self, dist_func, dfnum_dtype, dfden_dtype, dtype): dfnum = cupy.ones(self.dfnum_shape, dtype=dfnum_dtype) dfden = cupy.ones(self.dfden_shape, dtype=dfden_dtype) out = dist_func(dfnum, dfden, self.shape) - self.assertEqual(self.shape, out.shape) - # numpy and dpdp output dtype is float64 - self.assertEqual(out.dtype, numpy.float64) + assert self.shape == out.shape + # assert out.dtype == dtype - @_loops.for_float_dtypes("dfnum_dtype") - @_loops.for_float_dtypes("dfden_dtype") + @testing.for_float_dtypes("dfnum_dtype") + @testing.for_float_dtypes("dfden_dtype") def test_f(self, dfnum_dtype, dfden_dtype): - self.check_distribution(_distributions.f, dfnum_dtype, dfden_dtype) + self.check_distribution( + _distributions.f, dfnum_dtype, dfden_dtype, self.dtype + ) @testing.parameterize( *testing.product( { - "shape": [(4, 3, 2), (3, 2)], + "shape": [(4, 3, 2), (3, 2), None], "shape_shape": [(), (3, 2)], "scale_shape": [(), (3, 2)], + # "dtype": _float_dtypes, # to escape timeout + "dtype": [None], # no dtype supported } ) ) @pytest.mark.usefixtures("allow_fall_back_on_numpy") -class TestDistributionsGamma(unittest.TestCase): - def check_distribution(self, dist_func, shape_dtype, scale_dtype): +class TestDistributionsGamma: + + def check_distribution( + self, dist_func, shape_dtype, scale_dtype, dtype=None + ): shape = cupy.ones(self.shape_shape, dtype=shape_dtype) scale = cupy.ones(self.scale_shape, dtype=scale_dtype) - out = dist_func(shape, scale, self.shape) - self.assertEqual(self.shape, out.shape) - # numpy and dpdp output dtype is float64 - self.assertEqual(out.dtype, numpy.float64) + if dtype is None: + out = dist_func(shape, scale, self.shape) + else: + out = dist_func(shape, scale, self.shape, dtype) + out_shape = self.shape + if self.shape is None: + out_shape = shape.shape if shape.shape != () else scale.shape + if self.shape is not None: + assert out_shape == out.shape + # assert out.dtype == dtype + + @testing.for_dtypes_combination( + _float_dtypes, names=["shape_dtype", "scale_dtype"] + ) + def test_gamma_legacy(self, shape_dtype, scale_dtype): + self.check_distribution( + _distributions.gamma, shape_dtype, scale_dtype, 
self.dtype + ) - @_loops.for_dtypes_combination( - _regular_float_dtypes, names=["shape_dtype", "scale_dtype"] + @pytest.mark.skip("no support of generator yet") + @testing.for_dtypes_combination( + _float_dtypes, names=["shape_dtype", "scale_dtype"] ) - def test_gamma(self, shape_dtype, scale_dtype): - self.check_distribution(_distributions.gamma, shape_dtype, scale_dtype) + def test_gamma_generator(self, shape_dtype, scale_dtype): + self.check_distribution( + cupy.random.default_rng().gamma, shape_dtype, scale_dtype + ) @testing.parameterize( @@ -188,26 +239,29 @@ def test_gamma(self, shape_dtype, scale_dtype): { "shape": [(4, 3, 2), (3, 2)], "p_shape": [(), (3, 2)], + # "dtype": _int_dtypes, # to escape timeout + "dtype": [None], # no dtype supported } ) ) -class TestDistributionsGeometric(unittest.TestCase): - def check_distribution(self, dist_func, p_dtype): +@pytest.mark.usefixtures("allow_fall_back_on_numpy") +class TestDistributionsGeometric: + + def check_distribution(self, dist_func, p_dtype, dtype): p = 0.5 * cupy.ones(self.p_shape, dtype=p_dtype) out = dist_func(p, self.shape) - self.assertEqual(self.shape, out.shape) - # numpy output dtype is int64, dpnp output is int32 - self.assertEqual(out.dtype, numpy.int64) + assert self.shape == out.shape + # assert out.dtype == dtype - @_loops.for_float_dtypes("p_dtype") + @testing.for_float_dtypes("p_dtype") def test_geometric(self, p_dtype): - self.check_distribution(_distributions.geometric, p_dtype) + self.check_distribution(_distributions.geometric, p_dtype, self.dtype) @testing.parameterize( *testing.product( { - "shape": [(4, 3, 2), (3, 2)], + "shape": [(4, 3, 2), (3, 2), None], "loc_shape": [(), (3, 2)], "scale_shape": [(), (3, 2)], } @@ -215,8 +269,10 @@ def test_geometric(self, p_dtype): ) @pytest.mark.usefixtures("allow_fall_back_on_numpy") class TestDistributionsGumbel(RandomDistributionsTestCase): - @_loops.for_dtypes_combination( - _regular_float_dtypes, names=["loc_dtype", "scale_dtype"] + + # @testing.for_float_dtypes("dtype", no_float16=True) # no dtype supported + @testing.for_dtypes_combination( + _float_dtypes, names=["loc_dtype", "scale_dtype"] ) def test_gumbel(self, loc_dtype, scale_dtype): loc = numpy.ones(self.loc_shape, dtype=loc_dtype) @@ -232,22 +288,25 @@ def test_gumbel(self, loc_dtype, scale_dtype): "nbad_shape": [(), (3, 2)], "nsample_shape": [(), (3, 2)], "nsample_dtype": [numpy.int32, numpy.int64], # to escape timeout + # "dtype": [numpy.int32, numpy.int64], # to escape timeout + "dtype": [None], # no dtype supported } ) ) -class TestDistributionsHyperGeometric(unittest.TestCase): +@pytest.mark.usefixtures("allow_fall_back_on_numpy") +class TestDistributionsHyperGeometric: + def check_distribution( - self, dist_func, ngood_dtype, nbad_dtype, nsample_dtype + self, dist_func, ngood_dtype, nbad_dtype, nsample_dtype, dtype ): ngood = cupy.ones(self.ngood_shape, dtype=ngood_dtype) nbad = cupy.ones(self.nbad_shape, dtype=nbad_dtype) nsample = cupy.ones(self.nsample_shape, dtype=nsample_dtype) out = dist_func(ngood, nbad, nsample, self.shape) - self.assertEqual(self.shape, out.shape) - # numpy output dtype is int64, dpnp output is int32 - self.assertEqual(out.dtype, numpy.int64) + assert self.shape == out.shape + # assert out.dtype == dtype - @_loops.for_dtypes_combination( + @testing.for_dtypes_combination( [numpy.int32, numpy.int64], names=["ngood_dtype", "nbad_dtype"] ) def test_hypergeometric(self, ngood_dtype, nbad_dtype): @@ -256,13 +315,14 @@ def test_hypergeometric(self, ngood_dtype, nbad_dtype): 
ngood_dtype, nbad_dtype, self.nsample_dtype, + self.dtype, ) @testing.parameterize( *testing.product( { - "shape": [(4, 3, 2), (3, 2)], + "shape": [(4, 3, 2), (3, 2), None], "loc_shape": [(), (3, 2)], "scale_shape": [(), (3, 2)], } @@ -270,8 +330,10 @@ def test_hypergeometric(self, ngood_dtype, nbad_dtype): ) @pytest.mark.usefixtures("allow_fall_back_on_numpy") class TestDistributionsuLaplace(RandomDistributionsTestCase): - @_loops.for_dtypes_combination( - _regular_float_dtypes, names=["loc_dtype", "scale_dtype"] + + # @testing.for_float_dtypes("dtype", no_float16=True) # no dtype supported + @testing.for_dtypes_combination( + _float_dtypes, names=["loc_dtype", "scale_dtype"] ) def test_laplace(self, loc_dtype, scale_dtype): loc = numpy.ones(self.loc_shape, dtype=loc_dtype) @@ -290,8 +352,10 @@ def test_laplace(self, loc_dtype, scale_dtype): ) @pytest.mark.usefixtures("allow_fall_back_on_numpy") class TestDistributionsLogistic(RandomDistributionsTestCase): - @_loops.for_dtypes_combination( - _regular_float_dtypes, names=["loc_dtype", "scale_dtype"] + + # @testing.for_float_dtypes("dtype", no_float16=True) # no dtype supported + @testing.for_dtypes_combination( + _float_dtypes, names=["loc_dtype", "scale_dtype"] ) def test_logistic(self, loc_dtype, scale_dtype): loc = numpy.ones(self.loc_shape, dtype=loc_dtype) @@ -302,15 +366,18 @@ def test_logistic(self, loc_dtype, scale_dtype): @testing.parameterize( *testing.product( { - "shape": [(4, 3, 2), (3, 2)], - "mean_shape": [()], - "sigma_shape": [()], + "shape": [(4, 3, 2), (3, 2), None], + "mean_shape": [(), (3, 2)], + "sigma_shape": [(), (3, 2)], } ) ) +@pytest.mark.usefixtures("allow_fall_back_on_numpy") class TestDistributionsLognormal(RandomDistributionsTestCase): - @_loops.for_dtypes_combination( - _regular_float_dtypes, names=["mean_dtype", "sigma_dtype"] + + # @testing.for_float_dtypes("dtype", no_float16=True) # no dtype supported + @testing.for_dtypes_combination( + _float_dtypes, names=["mean_dtype", "sigma_dtype"] ) def test_lognormal(self, mean_dtype, sigma_dtype): mean = numpy.ones(self.mean_shape, dtype=mean_dtype) @@ -326,18 +393,22 @@ def test_lognormal(self, mean_dtype, sigma_dtype): } ) ) +@pytest.mark.usefixtures("allow_fall_back_on_numpy") class TestDistributionsLogseries(RandomDistributionsTestCase): - @_loops.for_float_dtypes("p_dtype", no_float16=True) + + # @testing.for_dtypes([numpy.int64, numpy.int32], "dtype") # no dtype supported + @testing.for_float_dtypes("p_dtype", no_float16=True) def test_logseries(self, p_dtype): p = numpy.full(self.p_shape, 0.5, dtype=p_dtype) self.check_distribution("logseries", {"p": p}) - @_loops.for_float_dtypes("p_dtype", no_float16=True) + # @testing.for_dtypes([numpy.int64, numpy.int32], "dtype") # no dtype supported + @testing.for_float_dtypes("p_dtype", no_float16=True) def test_logseries_for_invalid_p(self, p_dtype): - with self.assertRaises(ValueError): - cp_params = {"p": cupy.zeros(self.p_shape, dtype=p_dtype)} - _distributions.logseries(size=self.shape, **cp_params) - with self.assertRaises(ValueError): + # with pytest.raises(ValueError): # no exception raised by numpy + # cp_params = {"p": cupy.zeros(self.p_shape, dtype=p_dtype)} + # _distributions.logseries(size=self.shape, **cp_params) + with pytest.raises(ValueError): cp_params = {"p": cupy.ones(self.p_shape, dtype=p_dtype)} _distributions.logseries(size=self.shape, **cp_params) @@ -350,26 +421,23 @@ def test_logseries_for_invalid_p(self, p_dtype): } ) ) -class TestDistributionsMultivariateNormal(unittest.TestCase): - def 
check_distribution(self, dist_func, mean_dtype, cov_dtype): +@pytest.mark.skip("multivariate_normal is not fully supported yet") +class TestDistributionsMultivariateNormal: + + def check_distribution(self, dist_func, mean_dtype, cov_dtype, dtype): mean = cupy.zeros(self.d, dtype=mean_dtype) - cov = cupy.random.normal(size=(self.d, self.d)) - # dpnp.dpnp_array doesn't have .dot - # TODO - # no conversation to ndarray - cov = numpy.array(cov) + cov = cupy.random.normal(size=(self.d, self.d), dtype=cov_dtype) cov = cov.T.dot(cov) - cov = cupy.array(cov) - out = dist_func(mean, cov, self.shape) - self.assertEqual(self.shape + (self.d,), out.shape) - # numpy and dpdp output dtype is float64 - self.assertEqual(out.dtype, numpy.float64) - - @_loops.for_float_dtypes("mean_dtype", no_float16=True) - @_loops.for_float_dtypes("cov_dtype", no_float16=True) - def test_normal(self, mean_dtype, cov_dtype): + out = dist_func(mean, cov, self.shape, dtype=dtype) + assert self.shape + (self.d,) == out.shape + assert out.dtype == dtype + + @testing.for_float_dtypes("dtype", no_float16=True) + @testing.for_float_dtypes("mean_dtype", no_float16=True) + @testing.for_float_dtypes("cov_dtype", no_float16=True) + def test_normal(self, mean_dtype, cov_dtype, dtype): self.check_distribution( - _distributions.multivariate_normal, mean_dtype, cov_dtype + _distributions.multivariate_normal, mean_dtype, cov_dtype, dtype ) @@ -379,24 +447,31 @@ def test_normal(self, mean_dtype, cov_dtype): "shape": [(4, 3, 2), (3, 2)], "n_shape": [(), (3, 2)], "p_shape": [(), (3, 2)], + # "dtype": _int_dtypes, # to escape timeout + "dtype": [None], # no dtype supported } ) ) @pytest.mark.usefixtures("allow_fall_back_on_numpy") class TestDistributionsNegativeBinomial(RandomDistributionsTestCase): - @_loops.for_float_dtypes("n_dtype") - @_loops.for_float_dtypes("p_dtype") + + @testing.for_float_dtypes("n_dtype") + @testing.for_float_dtypes("p_dtype") def test_negative_binomial(self, n_dtype, p_dtype): n = numpy.full(self.n_shape, 5, dtype=n_dtype) p = numpy.full(self.p_shape, 0.5, dtype=p_dtype) - self.check_distribution("negative_binomial", {"n": n, "p": p}) + self.check_distribution( + "negative_binomial", {"n": n, "p": p}, self.dtype + ) - @_loops.for_float_dtypes("n_dtype") - @_loops.for_float_dtypes("p_dtype") + @testing.for_float_dtypes("n_dtype") + @testing.for_float_dtypes("p_dtype") def test_negative_binomial_for_noninteger_n(self, n_dtype, p_dtype): n = numpy.full(self.n_shape, 5.5, dtype=n_dtype) p = numpy.full(self.p_shape, 0.5, dtype=p_dtype) - self.check_distribution("negative_binomial", {"n": n, "p": p}) + self.check_distribution( + "negative_binomial", {"n": n, "p": p}, self.dtype + ) @testing.parameterize( @@ -405,31 +480,34 @@ def test_negative_binomial_for_noninteger_n(self, n_dtype, p_dtype): "shape": [(4, 3, 2), (3, 2)], "df_shape": [(), (3, 2)], "nonc_shape": [(), (3, 2)], + # "dtype": _int_dtypes, # to escape timeout + "dtype": [None], # no dtype supported } ) ) @pytest.mark.usefixtures("allow_fall_back_on_numpy") class TestDistributionsNoncentralChisquare(RandomDistributionsTestCase): - @_loops.for_dtypes_combination( + + @testing.for_dtypes_combination( _regular_float_dtypes, names=["df_dtype", "nonc_dtype"] ) def test_noncentral_chisquare(self, df_dtype, nonc_dtype): df = numpy.full(self.df_shape, 1, dtype=df_dtype) nonc = numpy.full(self.nonc_shape, 1, dtype=nonc_dtype) self.check_distribution( - "noncentral_chisquare", {"df": df, "nonc": nonc} + "noncentral_chisquare", {"df": df, "nonc": nonc}, self.dtype ) - 
@_loops.for_float_dtypes("param_dtype", no_float16=True) + @testing.for_float_dtypes("param_dtype", no_float16=True) def test_noncentral_chisquare_for_invalid_params(self, param_dtype): df = cupy.full(self.df_shape, -1, dtype=param_dtype) nonc = cupy.full(self.nonc_shape, 1, dtype=param_dtype) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): _distributions.noncentral_chisquare(df, nonc, size=self.shape) df = cupy.full(self.df_shape, 1, dtype=param_dtype) nonc = cupy.full(self.nonc_shape, -1, dtype=param_dtype) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): _distributions.noncentral_chisquare(df, nonc, size=self.shape) @@ -440,12 +518,15 @@ def test_noncentral_chisquare_for_invalid_params(self, param_dtype): "dfnum_shape": [(), (3, 2)], "dfden_shape": [(), (3, 2)], "nonc_shape": [(), (3, 2)], + # "dtype": _int_dtypes, # to escape timeout + "dtype": [None], # no dtype supported } ) ) @pytest.mark.usefixtures("allow_fall_back_on_numpy") class TestDistributionsNoncentralF(RandomDistributionsTestCase): - @_loops.for_dtypes_combination( + + @testing.for_dtypes_combination( _regular_float_dtypes, names=["dfnum_dtype", "dfden_dtype", "nonc_dtype"], ) @@ -454,34 +535,36 @@ def test_noncentral_f(self, dfnum_dtype, dfden_dtype, nonc_dtype): dfden = numpy.full(self.dfden_shape, 1, dtype=dfden_dtype) nonc = numpy.full(self.nonc_shape, 1, dtype=nonc_dtype) self.check_distribution( - "noncentral_f", {"dfnum": dfnum, "dfden": dfden, "nonc": nonc} + "noncentral_f", + {"dfnum": dfnum, "dfden": dfden, "nonc": nonc}, + self.dtype, ) - @_loops.for_float_dtypes("param_dtype", no_float16=True) + @testing.for_float_dtypes("param_dtype", no_float16=True) def test_noncentral_f_for_invalid_params(self, param_dtype): dfnum = numpy.full(self.dfnum_shape, -1, dtype=param_dtype) dfden = numpy.full(self.dfden_shape, 1, dtype=param_dtype) nonc = numpy.full(self.nonc_shape, 1, dtype=param_dtype) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): _distributions.noncentral_f(dfnum, dfden, nonc, size=self.shape) dfnum = numpy.full(self.dfnum_shape, 1, dtype=param_dtype) dfden = numpy.full(self.dfden_shape, -1, dtype=param_dtype) nonc = numpy.full(self.nonc_shape, 1, dtype=param_dtype) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): _distributions.noncentral_f(dfnum, dfden, nonc, size=self.shape) dfnum = numpy.full(self.dfnum_shape, 1, dtype=param_dtype) dfden = numpy.full(self.dfden_shape, 1, dtype=param_dtype) nonc = numpy.full(self.nonc_shape, -1, dtype=param_dtype) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): _distributions.noncentral_f(dfnum, dfden, nonc, size=self.shape) @testing.parameterize( *testing.product( { - "shape": [(4, 3, 2), (3, 2)], + "shape": [(4, 3, 2), (3, 2), None], "loc_shape": [(), (3, 2)], "scale_shape": [(), (3, 2)], } @@ -489,8 +572,10 @@ def test_noncentral_f_for_invalid_params(self, param_dtype): ) @pytest.mark.usefixtures("allow_fall_back_on_numpy") class TestDistributionsNormal(RandomDistributionsTestCase): - @_loops.for_dtypes_combination( - _regular_float_dtypes, names=["loc_dtype", "scale_dtype"] + + # @testing.for_float_dtypes("dtype", no_float16=True) # no dtype supported + @testing.for_dtypes_combination( + _float_dtypes, names=["loc_dtype", "scale_dtype"] ) def test_normal(self, loc_dtype, scale_dtype): loc = numpy.ones(self.loc_shape, dtype=loc_dtype) @@ -501,21 +586,23 @@ def test_normal(self, loc_dtype, scale_dtype): @testing.parameterize( *testing.product( { - 
"shape": [(4, 3, 2), (3, 2)], + "shape": [(4, 3, 2), (3, 2), None], "a_shape": [(), (3, 2)], } ) ) @pytest.mark.usefixtures("allow_fall_back_on_numpy") -class TestDistributionsPareto(unittest.TestCase): +class TestDistributionsPareto: + def check_distribution(self, dist_func, a_dtype): a = cupy.ones(self.a_shape, dtype=a_dtype) out = dist_func(a, self.shape) - self.assertEqual(self.shape, out.shape) - # numpy and dpdp output dtype is float64 - self.assertEqual(out.dtype, numpy.float64) + if self.shape is not None: + assert self.shape == out.shape + # assert out.dtype == dtype - @_loops.for_float_dtypes("a_dtype") + # @testing.for_float_dtypes("dtype", no_float16=True) # no dtype supported + @testing.for_float_dtypes("a_dtype") def test_pareto(self, a_dtype): self.check_distribution(_distributions.pareto, a_dtype) @@ -523,23 +610,49 @@ def test_pareto(self, a_dtype): @testing.parameterize( *testing.product( { - "shape": [(4, 3, 2), (3, 2)], + "shape": [(4, 3, 2), (3, 2), None], "lam_shape": [(), (3, 2)], } ) ) -class TestDistributionsPoisson(unittest.TestCase): - def check_distribution(self, dist_func, lam_dtype): - lam = cupy.full(self.lam_shape, 5, dtype=lam_dtype) - out = dist_func(lam, self.shape) - self.assertEqual(self.shape, out.shape) - # numpy output dtype is int64, dpnp output is int32 - self.assertEqual(out.dtype, numpy.int64) +@pytest.mark.usefixtures("allow_fall_back_on_numpy") +class TestDistributionsPoisson: - @_loops.for_float_dtypes("lam_dtype") - def test_poisson(self, lam_dtype): + def check_distribution(self, dist_func, lam_dtype, dtype=None): + lam = cupy.full(self.lam_shape, 5, dtype=lam_dtype) + if dtype is not None: + out = dist_func(lam, self.shape, dtype) + assert out.dtype == dtype + else: + out = dist_func(lam, self.shape) + if self.shape is not None: + assert self.shape == out.shape + # else: + # assert lam.shape == out.shape + + # @testing.for_int_dtypes("dtype") # no dtype supported + @testing.for_float_dtypes("lam_dtype") + def test_poisson_legacy(self, lam_dtype): self.check_distribution(_distributions.poisson, lam_dtype) + @pytest.mark.skip("no support of generator yet") + @testing.for_float_dtypes("lam_dtype") + def test_poisson_generator(self, lam_dtype): + self.check_distribution(cupy.random.default_rng(0).poisson, lam_dtype) + + +class TestDistributionsPoissonInvalid: + + @pytest.mark.skip("no support of generator yet") + def test_none_lam_generator(self): + with pytest.raises(TypeError): + cupy.random.default_rng(0).poisson(None) + + @pytest.mark.usefixtures("allow_fall_back_on_numpy") + def test_none_lam_legacy(self): + with pytest.raises(TypeError): + _distributions.poisson(None) + @testing.parameterize( *testing.product( @@ -549,16 +662,20 @@ def test_poisson(self, lam_dtype): } ) ) +@pytest.mark.usefixtures("allow_fall_back_on_numpy") class TestDistributionsPower(RandomDistributionsTestCase): - @_loops.for_float_dtypes("a_dtype") + + # @testing.for_float_dtypes("dtype", no_float16=True) # no dtype supported + @testing.for_float_dtypes("a_dtype") def test_power(self, a_dtype): a = numpy.full(self.a_shape, 0.5, dtype=a_dtype) self.check_distribution("power", {"a": a}) - @_loops.for_float_dtypes("a_dtype") + # @testing.for_float_dtypes("dtype", no_float16=True) # no dtype supported + @testing.for_float_dtypes("a_dtype") def test_power_for_negative_a(self, a_dtype): a = numpy.full(self.a_shape, -0.5, dtype=a_dtype) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): cp_params = {"a": cupy.asarray(a)} getattr(_distributions, 
"power")(size=self.shape, **cp_params) @@ -566,27 +683,31 @@ def test_power_for_negative_a(self, a_dtype): @testing.parameterize( *testing.product( { - "shape": [(4, 3, 2), (3, 2)], + "shape": [(4, 3, 2), (3, 2), None], "scale_shape": [(), (3, 2)], } ) ) @pytest.mark.usefixtures("allow_fall_back_on_numpy") class TestDistributionsRayleigh(RandomDistributionsTestCase): - @_loops.for_float_dtypes("scale_dtype") + + # @testing.for_float_dtypes("dtype", no_float16=True) # no dtype supported + @testing.for_float_dtypes("scale_dtype") def test_rayleigh(self, scale_dtype): scale = numpy.full(self.scale_shape, 3, dtype=scale_dtype) self.check_distribution("rayleigh", {"scale": scale}) - @_loops.for_float_dtypes("scale_dtype") + # @testing.for_float_dtypes("dtype", no_float16=True) # no dtype supported + @testing.for_float_dtypes("scale_dtype") def test_rayleigh_for_zero_scale(self, scale_dtype): scale = numpy.zeros(self.scale_shape, dtype=scale_dtype) self.check_distribution("rayleigh", {"scale": scale}) - @_loops.for_float_dtypes("scale_dtype") + # @testing.for_float_dtypes("dtype", no_float16=True) # no dtype supported + @testing.for_float_dtypes("scale_dtype") def test_rayleigh_for_negative_scale(self, scale_dtype): scale = numpy.full(self.scale_shape, -0.5, dtype=scale_dtype) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): cp_params = {"scale": cupy.asarray(scale)} _distributions.rayleigh(size=self.shape, **cp_params) @@ -599,6 +720,8 @@ def test_rayleigh_for_negative_scale(self, scale_dtype): ) ) class TestDistributionsStandardCauchy(RandomDistributionsTestCase): + + # @testing.for_float_dtypes("dtype", no_float16=True) # no dtype supported def test_standard_cauchy(self): self.check_distribution("standard_cauchy", {}) @@ -606,11 +729,13 @@ def test_standard_cauchy(self): @testing.parameterize( *testing.product( { - "shape": [(4, 3, 2), (3, 2)], + "shape": [(4, 3, 2), (3, 2), None], } ) ) class TestDistributionsStandardExponential(RandomDistributionsTestCase): + + # @testing.for_float_dtypes("dtype", no_float16=True) # no dtype supported def test_standard_exponential(self): self.check_distribution("standard_exponential", {}) @@ -618,27 +743,53 @@ def test_standard_exponential(self): @testing.parameterize( *testing.product( { - "shape": [(4, 3, 2), (3, 2)], + "shape": [(4, 3, 2), (3, 2), None], "shape_shape": [(), (3, 2)], } ) ) @pytest.mark.usefixtures("allow_fall_back_on_numpy") class TestDistributionsStandardGamma(RandomDistributionsTestCase): - @_loops.for_float_dtypes("shape_dtype") - def test_standard_gamma(self, shape_dtype): + + # @testing.for_float_dtypes("dtype", no_float16=True) # no dtype supported + @testing.for_float_dtypes("shape_dtype") + def test_standard_gamma_legacy(self, shape_dtype): shape = numpy.ones(self.shape_shape, dtype=shape_dtype) self.check_distribution("standard_gamma", {"shape": shape}) + @pytest.mark.skip("no support of generator yet") + @testing.for_float_dtypes("dtype", no_float16=True) + @testing.for_float_dtypes("shape_dtype") + def test_standard_gamma_generator(self, shape_dtype, dtype): + shape = numpy.ones(self.shape_shape, dtype=shape_dtype) + self.check_generator_distribution( + "standard_gamma", {"shape": shape}, dtype + ) + + +class TestDistributionsStandardGammaInvalid(RandomDistributionsTestCase): + + @pytest.mark.skip("no support of generator yet") + def test_none_shape_generator(self): + with pytest.raises(TypeError): + cupy.random.default_rng(0).standard_gamma(None) + + @pytest.mark.usefixtures("allow_fall_back_on_numpy") + 
def test_none_shape_legacy(self): + with pytest.raises(TypeError): + _distributions.standard_gamma(None) + @testing.parameterize( *testing.product( { - "shape": [(4, 3, 2), (3, 2)], + "shape": [(4, 3, 2), (3, 2), None], } ) ) class TestDistributionsStandardNormal(RandomDistributionsTestCase): + + # @testing.for_float_dtypes("dtype", no_float16=True) # no dtype supported def test_standard_normal(self): self.check_distribution("standard_normal", {}) @@ -652,15 +803,16 @@ def test_standard_normal(self): ) ) @pytest.mark.usefixtures("allow_fall_back_on_numpy") -class TestDistributionsStandardT(unittest.TestCase): +class TestDistributionsStandardT: + def check_distribution(self, dist_func, df_dtype): df = cupy.ones(self.df_shape, dtype=df_dtype) out = dist_func(df, self.shape) - self.assertEqual(self.shape, out.shape) - # numpy and dpdp output dtype is float64 - self.assertEqual(out.dtype, numpy.float64) + assert self.shape == out.shape + # assert out.dtype == dtype - @_loops.for_float_dtypes("df_dtype") + # @testing.for_float_dtypes("dtype", no_float16=True) # no dtype supported + @testing.for_float_dtypes("df_dtype") def test_standard_t(self, df_dtype): self.check_distribution(_distributions.standard_t, df_dtype) @@ -672,12 +824,15 @@ def test_standard_t(self, df_dtype): "left_shape": [(), (3, 2)], "mode_shape": [(), (3, 2)], "right_shape": [(), (3, 2)], + # "dtype": _regular_float_dtypes, # to escape timeout + "dtype": [None], # no dtype supported } ) ) @pytest.mark.usefixtures("allow_fall_back_on_numpy") class TestDistributionsTriangular(RandomDistributionsTestCase): - @_loops.for_dtypes_combination( + + @testing.for_dtypes_combination( _regular_float_dtypes, names=["left_dtype", "mode_dtype", "right_dtype"] ) def test_triangular(self, left_dtype, mode_dtype, right_dtype): @@ -685,27 +840,29 @@ def test_triangular(self, left_dtype, mode_dtype, right_dtype): mode = numpy.full(self.mode_shape, 0, dtype=mode_dtype) right = numpy.full(self.right_shape, 2, dtype=right_dtype) self.check_distribution( - "triangular", {"left": left, "mode": mode, "right": right} + "triangular", + {"left": left, "mode": mode, "right": right}, + self.dtype, ) - @_loops.for_float_dtypes("param_dtype", no_float16=True) + @testing.for_float_dtypes("param_dtype", no_float16=True) def test_triangular_for_invalid_params(self, param_dtype): left = cupy.full(self.left_shape, 1, dtype=param_dtype) mode = cupy.full(self.mode_shape, 0, dtype=param_dtype) right = cupy.full(self.right_shape, 2, dtype=param_dtype) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): _distributions.triangular(left, mode, right, size=self.shape) left = cupy.full(self.left_shape, -2, dtype=param_dtype) mode = cupy.full(self.mode_shape, 0, dtype=param_dtype) right = cupy.full(self.right_shape, -1, dtype=param_dtype) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): _distributions.triangular(left, mode, right, size=self.shape) left = cupy.full(self.left_shape, 0, dtype=param_dtype) mode = cupy.full(self.mode_shape, 0, dtype=param_dtype) right = cupy.full(self.right_shape, 0, dtype=param_dtype) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): _distributions.triangular(left, mode, right, size=self.shape) @@ -720,8 +877,10 @@ def test_triangular_for_invalid_params(self, param_dtype): ) @pytest.mark.usefixtures("allow_fall_back_on_numpy") class TestDistributionsUniform(RandomDistributionsTestCase): - @_loops.for_dtypes_combination( - _regular_float_dtypes, names=["low_dtype", "high_dtype"] + + 
# @testing.for_float_dtypes("dtype", no_float16=True) # no dtype supported + @testing.for_dtypes_combination( + _float_dtypes, names=["low_dtype", "high_dtype"] ) def test_uniform(self, low_dtype, high_dtype): low = numpy.ones(self.low_shape, dtype=low_dtype) @@ -735,24 +894,28 @@ def test_uniform(self, low_dtype, high_dtype): "shape": [(4, 3, 2), (3, 2)], "mu_shape": [(), (3, 2)], "kappa_shape": [(), (3, 2)], + # "dtype": _float_dtypes, # to escape timeout + "dtype": [None], # no dtype supported } ) ) @pytest.mark.usefixtures("allow_fall_back_on_numpy") -class TestDistributionsVonmises(unittest.TestCase): - def check_distribution(self, dist_func, mu_dtype, kappa_dtype): +class TestDistributionsVonmises: + + def check_distribution(self, dist_func, mu_dtype, kappa_dtype, dtype): mu = cupy.ones(self.mu_shape, dtype=mu_dtype) kappa = cupy.ones(self.kappa_shape, dtype=kappa_dtype) out = dist_func(mu, kappa, self.shape) - self.assertEqual(self.shape, out.shape) - # numpy and dpdp output dtype is float64 - self.assertEqual(out.dtype, numpy.float64) + assert self.shape == out.shape + # assert out.dtype == dtype - @_loops.for_dtypes_combination( - _regular_float_dtypes, names=["mu_dtype", "kappa_dtype"] + @testing.for_dtypes_combination( + _float_dtypes, names=["mu_dtype", "kappa_dtype"] ) def test_vonmises(self, mu_dtype, kappa_dtype): - self.check_distribution(_distributions.vonmises, mu_dtype, kappa_dtype) + self.check_distribution( + _distributions.vonmises, mu_dtype, kappa_dtype, self.dtype + ) @testing.parameterize( @@ -761,18 +924,23 @@ def test_vonmises(self, mu_dtype, kappa_dtype): "shape": [(4, 3, 2), (3, 2)], "mean_shape": [(), (3, 2)], "scale_shape": [(), (3, 2)], + # "dtype": _regular_float_dtypes, # to escape timeout + "dtype": [None], # no dtype supported } ) ) @pytest.mark.usefixtures("allow_fall_back_on_numpy") class TestDistributionsWald(RandomDistributionsTestCase): - @_loops.for_dtypes_combination( - _regular_float_dtypes, names=["mean_dtype", "scale_dtype"] + + @testing.for_dtypes_combination( + _float_dtypes, names=["mean_dtype", "scale_dtype"] ) def test_wald(self, mean_dtype, scale_dtype): mean = numpy.full(self.mean_shape, 3, dtype=mean_dtype) scale = numpy.full(self.scale_shape, 3, dtype=scale_dtype) - self.check_distribution("wald", {"mean": mean, "scale": scale}) + self.check_distribution( + "wald", {"mean": mean, "scale": scale}, self.dtype + ) @testing.parameterize( @@ -785,20 +953,24 @@ def test_wald(self, mean_dtype, scale_dtype): ) @pytest.mark.usefixtures("allow_fall_back_on_numpy") class TestDistributionsWeibull(RandomDistributionsTestCase): - @_loops.for_float_dtypes("a_dtype") + + # @testing.for_float_dtypes("dtype", no_float16=True) # no dtype supported + @testing.for_float_dtypes("a_dtype") def test_weibull(self, a_dtype): a = numpy.ones(self.a_shape, dtype=a_dtype) self.check_distribution("weibull", {"a": a}) - @_loops.for_float_dtypes("a_dtype") + # @testing.for_float_dtypes("dtype", no_float16=True) # no dtype supported + @testing.for_float_dtypes("a_dtype") def test_weibull_for_inf_a(self, a_dtype): a = numpy.full(self.a_shape, numpy.inf, dtype=a_dtype) self.check_distribution("weibull", {"a": a}) - @_loops.for_float_dtypes("a_dtype") + # @testing.for_float_dtypes("dtype", no_float16=True) # no dtype supported + @testing.for_float_dtypes("a_dtype") def test_weibull_for_negative_a(self, a_dtype): a = numpy.full(self.a_shape, -0.5, dtype=a_dtype) - with self.assertRaises(ValueError): + with pytest.raises(ValueError): cp_params = {"a": cupy.asarray(a)} 
getattr(_distributions, "weibull")(size=self.shape, **cp_params) @@ -813,8 +985,9 @@ def test_weibull_for_negative_a(self, a_dtype): ) @pytest.mark.usefixtures("allow_fall_back_on_numpy") class TestDistributionsZipf(RandomDistributionsTestCase): - @_loops.for_dtypes([numpy.int32, numpy.int64], "dtype") - @_loops.for_float_dtypes("a_dtype") - def test_zipf(self, a_dtype, dtype): + + # @testing.for_dtypes([numpy.int32, numpy.int64], "dtype") # no dtype supported + @testing.for_float_dtypes("a_dtype") + def test_zipf(self, a_dtype): a = numpy.full(self.a_shape, 2, dtype=a_dtype) self.check_distribution("zipf", {"a": a}) diff --git a/tests/third_party/cupy/testing/_loops.py b/tests/third_party/cupy/testing/_loops.py index f8ae6de9dad..a7d1d72f674 100644 --- a/tests/third_party/cupy/testing/_loops.py +++ b/tests/third_party/cupy/testing/_loops.py @@ -1229,9 +1229,9 @@ def for_dtypes_combination(types, names=("dtype",), full=None): """ types = list(types) - if len(types) == 1: - (name,) = names - return for_dtypes(types, name) + # if len(types) == 1: + # (name,) = names + # return for_dtypes(types, name) if full is None: full = int(os.environ.get("CUPY_TEST_FULL_COMBINATION", "0")) != 0 From f9387f70ae81f6a5d9edfe3e3be9b69a08d5f1ea Mon Sep 17 00:00:00 2001 From: Anton Volkov Date: Fri, 15 Nov 2024 18:33:34 +0100 Subject: [PATCH 27/29] Resolve failure on Windows in hypergeometric distribution --- .../third_party/cupy/random_tests/test_distributions.py | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/tests/third_party/cupy/random_tests/test_distributions.py b/tests/third_party/cupy/random_tests/test_distributions.py index 049113b3f76..892ec897ea0 100644 --- a/tests/third_party/cupy/random_tests/test_distributions.py +++ b/tests/third_party/cupy/random_tests/test_distributions.py @@ -3,7 +3,7 @@ import dpnp as cupy from dpnp import random as _distributions -from tests.helper import has_support_aspect64 +from tests.helper import has_support_aspect64, is_win_platform from tests.third_party.cupy import testing if has_support_aspect64(): @@ -310,6 +310,13 @@ def check_distribution( [numpy.int32, numpy.int64], names=["ngood_dtype", "nbad_dtype"] ) def test_hypergeometric(self, ngood_dtype, nbad_dtype): + if ( + is_win_platform() + and numpy.int64 in (ngood_dtype, nbad_dtype) + and numpy.lib.NumpyVersion(numpy.__version__) < "2.0" + ): + pytest.skip("numpy raises TypeError") + self.check_distribution( _distributions.hypergeometric, ngood_dtype, From 2c19837332fd9a4868241716ef2323c59099501a Mon Sep 17 00:00:00 2001 From: Anton Volkov Date: Fri, 15 Nov 2024 18:35:45 +0100 Subject: [PATCH 28/29] Allow fallback on NumPy in a test for randint --- tests/third_party/cupy/random_tests/test_sample.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tests/third_party/cupy/random_tests/test_sample.py b/tests/third_party/cupy/random_tests/test_sample.py index ce6d5d175f7..97a28310035 100644 --- a/tests/third_party/cupy/random_tests/test_sample.py +++ b/tests/third_party/cupy/random_tests/test_sample.py @@ -105,6 +105,7 @@ def test_goodness_of_fit_2(self): class TestRandintDtype(unittest.TestCase): + @pytest.mark.usefixtures("allow_fall_back_on_numpy") @testing.with_requires("numpy>=2.0") @testing.for_dtypes( [numpy.int8, numpy.uint8, numpy.int16, numpy.uint16, numpy.int32] From f9775cdd5cb5e032d4390827cdb8fbf5bd67f58c Mon Sep 17 00:00:00 2001 From: Anton Volkov Date: Sat, 16 Nov 2024 13:05:02 +0100 Subject: [PATCH 29/29] Make a valid numpy version string --- 
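Editorial aside for this patch (kept below the "---" marker so it stays out of
the commit message): numpy.lib.NumpyVersion accepts only full
major.minor.micro version strings, so the previous comparison against "2.0"
raised ValueError on the Windows/int64 path instead of triggering the intended
skip. A short sketch of the difference:

    import numpy

    numpy.lib.NumpyVersion(numpy.__version__) < "2.0.0"   # valid comparison
    numpy.lib.NumpyVersion(numpy.__version__) < "2.0"     # raises ValueError:
                                                          # not a valid numpy
                                                          # version string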
tests/third_party/cupy/random_tests/test_distributions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/third_party/cupy/random_tests/test_distributions.py b/tests/third_party/cupy/random_tests/test_distributions.py index 892ec897ea0..a1574a1a2ac 100644 --- a/tests/third_party/cupy/random_tests/test_distributions.py +++ b/tests/third_party/cupy/random_tests/test_distributions.py @@ -313,7 +313,7 @@ def test_hypergeometric(self, ngood_dtype, nbad_dtype): if ( is_win_platform() and numpy.int64 in (ngood_dtype, nbad_dtype) - and numpy.lib.NumpyVersion(numpy.__version__) < "2.0" + and numpy.lib.NumpyVersion(numpy.__version__) < "2.0.0" ): pytest.skip("numpy raises TypeError")
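
Closing aside on the _loops.py hunk in PATCH 26 above: the single-dtype
shortcut in for_dtypes_combination appears to be disabled because, on devices
without float64 support, dtype tuples such as _regular_float_dtypes shrink to
a single entry while callers still pass several names, and the old shortcut
unpacked all of the names into one. A minimal, hypothetical reproduction of
that unpacking failure outside the patched helper:

    types = [float]                       # e.g. only one float dtype remains
    names = ("loc_dtype", "scale_dtype")  # but two parameter names are requested
    try:
        (name,) = names                   # first step of the removed shortcut,
                                          # which then called for_dtypes(types, name)
    except ValueError as exc:
        print(f"shortcut cannot handle multiple names: {exc}")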