From 1ef0a487409911c703b9a1a93e7cba9eb3059b21 Mon Sep 17 00:00:00 2001 From: Antonio Ospite Date: Sat, 21 Oct 2023 11:02:43 +0200 Subject: [PATCH] Check for support of CUDA Memory Pools at runtime (#4679) Some CUDA GPUs, like the Quadro M3000M don't support Memory Pools operations like cudaMallocAsync/cudaFreeAsync even on driver versions newer than 11020, and this can result in errors like: CUDA runtime error: operation not supported So check for support at runtime instead of compile time. --- CHANGELOG.md | 1 + cpp/open3d/core/CUDAUtils.cpp | 19 +++++++++++++++++++ cpp/open3d/core/CUDAUtils.h | 7 +++++++ cpp/open3d/core/MemoryManagerCUDA.cpp | 22 +++++++++++----------- 4 files changed, 38 insertions(+), 11 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8965478f135..b4e8ade51b1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -18,6 +18,7 @@ * Changed TriangleMesh to store materials in a list so they can be accessed by the material index (PR #5938) * Support multi-threading in the RayCastingScene function to commit scene (PR #6051). 
* Fix some bad triangle generation in TriangleMesh::SimplifyQuadricDecimation +* Check for support of CUDA Memory Pools at runtime (#4679) ## 0.13 diff --git a/cpp/open3d/core/CUDAUtils.cpp b/cpp/open3d/core/CUDAUtils.cpp index 630cbf38938..05c81e77ead 100644 --- a/cpp/open3d/core/CUDAUtils.cpp +++ b/cpp/open3d/core/CUDAUtils.cpp @@ -108,6 +108,25 @@ void AssertCUDADeviceAvailable(const Device& device) { } } +bool SupportsMemoryPools(const Device& device) { +#ifdef BUILD_CUDA_MODULE + if (device.IsCUDA()) { + int driverVersion = 0; + int deviceSupportsMemoryPools = 0; + OPEN3D_CUDA_CHECK(cudaDriverGetVersion(&driverVersion)); + if (driverVersion >= + 11020) { // avoid invalid value error in cudaDeviceGetAttribute + OPEN3D_CUDA_CHECK(cudaDeviceGetAttribute(&deviceSupportsMemoryPools, cudaDevAttrMemoryPoolsSupported, device.GetID())); + } + return !!deviceSupportsMemoryPools; + } else { + return false; + } +#else + return false; +#endif +} + #ifdef BUILD_CUDA_MODULE int GetDevice() { int device; diff --git a/cpp/open3d/core/CUDAUtils.h b/cpp/open3d/core/CUDAUtils.h index 2996cd0987c..15f87be040d 100644 --- a/cpp/open3d/core/CUDAUtils.h +++ b/cpp/open3d/core/CUDAUtils.h @@ -255,6 +255,13 @@ void AssertCUDADeviceAvailable(int device_id); /// \param device The device to be checked. void AssertCUDADeviceAvailable(const Device& device); +/// Checks if the CUDA device supports Memory Pools +/// used by the Stream Ordered Memory Allocator, +/// see +/// https://docs.nvidia.com/cuda/cuda-runtime-api/group__CUDART__MEMORY__POOLS.html +/// \param device The device to be checked. 
+bool SupportsMemoryPools(const Device& device); + #ifdef BUILD_CUDA_MODULE int GetDevice(); diff --git a/cpp/open3d/core/MemoryManagerCUDA.cpp b/cpp/open3d/core/MemoryManagerCUDA.cpp index 3cc2f4730bc..8d35d645c37 100644 --- a/cpp/open3d/core/MemoryManagerCUDA.cpp +++ b/cpp/open3d/core/MemoryManagerCUDA.cpp @@ -19,12 +19,12 @@ void* MemoryManagerCUDA::Malloc(size_t byte_size, const Device& device) { void* ptr; if (device.IsCUDA()) { -#if CUDART_VERSION >= 11020 - OPEN3D_CUDA_CHECK(cudaMallocAsync(static_cast<void**>(&ptr), byte_size, - cuda::GetStream())); -#else - OPEN3D_CUDA_CHECK(cudaMalloc(static_cast<void**>(&ptr), byte_size)); -#endif + if (cuda::SupportsMemoryPools(device)) { + OPEN3D_CUDA_CHECK(cudaMallocAsync(static_cast<void**>(&ptr), + byte_size, cuda::GetStream())); + } else { + OPEN3D_CUDA_CHECK(cudaMalloc(static_cast<void**>(&ptr), byte_size)); + } } else { utility::LogError("Internal error: Unimplemented device {}.", device.ToString()); @@ -37,11 +37,11 @@ void MemoryManagerCUDA::Free(void* ptr, const Device& device) { if (device.IsCUDA()) { if (ptr && IsCUDAPointer(ptr, device)) { -#if CUDART_VERSION >= 11020 - OPEN3D_CUDA_CHECK(cudaFreeAsync(ptr, cuda::GetStream())); -#else - OPEN3D_CUDA_CHECK(cudaFree(ptr)); -#endif + if (cuda::SupportsMemoryPools(device)) { + OPEN3D_CUDA_CHECK(cudaFreeAsync(ptr, cuda::GetStream())); + } else { + OPEN3D_CUDA_CHECK(cudaFree(ptr)); + } } } else { utility::LogError("Internal error: Unimplemented device {}.",