diff --git a/python/dgl/graphbolt/internal_utils.py b/python/dgl/graphbolt/internal_utils.py
index 3a20a95bfe74..53e7ed8ed5f6 100644
--- a/python/dgl/graphbolt/internal_utils.py
+++ b/python/dgl/graphbolt/internal_utils.py
@@ -1,6 +1,8 @@
 """Miscallenous internal utils."""
+import functools
 import hashlib
 import os
+import platform
 import warnings
 from collections.abc import Mapping, Sequence
 
@@ -15,6 +17,20 @@
 from setuptools.extern.packaging import version
 
 
+@functools.lru_cache(maxsize=None)
+def is_wsl(v: str = platform.uname().release) -> int:
+    """Detect whether Python is running in WSL.
+
+    Returns 0 when not on WSL, 1 on WSL1 and 2 on WSL2.
+    """
+    if v.endswith("-Microsoft"):
+        return 1
+    elif v.endswith("microsoft-standard-WSL2"):
+        return 2
+
+    return 0
+
+
 # pylint: disable=invalid-name
 _default_formatwarning = warnings.formatwarning
diff --git a/tests/python/pytorch/graphbolt/impl/test_fused_csc_sampling_graph.py b/tests/python/pytorch/graphbolt/impl/test_fused_csc_sampling_graph.py
index e99873527d90..2b7d9ee11327 100644
--- a/tests/python/pytorch/graphbolt/impl/test_fused_csc_sampling_graph.py
+++ b/tests/python/pytorch/graphbolt/impl/test_fused_csc_sampling_graph.py
@@ -1618,6 +1618,9 @@ def test_csc_sampling_graph_to_device(device):
     F._default_context_str == "cpu",
     reason="Tests for pinned memory are only meaningful on GPU.",
 )
+@unittest.skipIf(
+    gb.is_wsl(), reason="In place pinning is not supported on WSL."
+)
 def test_csc_sampling_graph_to_pinned_memory():
     # Construct FusedCSCSamplingGraph.
     graph = create_fused_csc_sampling_graph()
diff --git a/tests/python/pytorch/graphbolt/impl/test_torch_based_feature_store.py b/tests/python/pytorch/graphbolt/impl/test_torch_based_feature_store.py
index c36e9b0f7d0a..201b024241be 100644
--- a/tests/python/pytorch/graphbolt/impl/test_torch_based_feature_store.py
+++ b/tests/python/pytorch/graphbolt/impl/test_torch_based_feature_store.py
@@ -1,8 +1,6 @@
 import os
 import tempfile
 import unittest
-from functools import reduce
-from operator import mul
 
 import backend as F
 
@@ -216,6 +214,8 @@ def test_torch_based_pinned_feature(dtype, idtype, shape, in_place):
     feature = gb.TorchBasedFeature(tensor)
 
     if in_place:
+        if gb.is_wsl():
+            pytest.skip("In place pinning is not supported on WSL.")
         feature.pin_memory_()
 
     # Check if pinning is truly in-place.
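
For context, the new helper classifies the WSL flavor from the kernel release string reported by `platform.uname().release`, and the test files gate in-place pinning on it because, per the skip reasons in the patch, in-place pinning is not supported on WSL. Below is a minimal standalone sketch of the same heuristic; the sample release strings are illustrative values typical of WSL1/WSL2 kernels (they vary by Windows build), not output captured from this repository:

```python
import functools
import platform


@functools.lru_cache(maxsize=None)
def is_wsl(v: str = platform.uname().release) -> int:
    """Return 0 when not on WSL, 1 on WSL1 and 2 on WSL2."""
    # The default argument binds the release string once at import time,
    # which is fine: the kernel cannot change within a running process.
    if v.endswith("-Microsoft"):
        return 1  # WSL1 kernels report releases like "4.4.0-19041-Microsoft".
    elif v.endswith("microsoft-standard-WSL2"):
        return 2  # WSL2 kernels end in "microsoft-standard-WSL2".
    return 0  # Anything else is treated as a native kernel.


# Illustrative release strings (assumed, not captured from this repo):
assert is_wsl("4.4.0-19041-Microsoft") == 1
assert is_wsl("5.15.90.1-microsoft-standard-WSL2") == 2
assert is_wsl("6.5.0-25-generic") == 0  # native Linux
```

Since the result is truthy only on WSL, the helper drops straight into both skip styles used above: `@unittest.skipIf(gb.is_wsl(), reason=...)` at collection time, or `pytest.skip(...)` inside a parameterized branch. The `lru_cache` makes repeated calls from many test decorators effectively free.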