diff --git a/CMakeLists.txt b/CMakeLists.txt
index 66842e6845edd..412b9c0cd59e0 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -70,10 +70,11 @@ find_package(Torch REQUIRED)
 # config is used for standalone C++ binaries that link against torch).
 # The `libtorch_python.so` library defines some of the glue code between
 # torch/python via pybind and is required by VLLM extensions for this
-# reason. So, add it by manually using `append_torchlib_if_found` from
-# torch's cmake setup.
+# reason. So, add it manually with `find_library` using torch's
+# installed library path.
 #
-append_torchlib_if_found(torch_python)
+find_library(torch_python_LIBRARY torch_python PATHS
+  "${TORCH_INSTALL_PREFIX}/lib")
 
 #
 # Set up GPU language and check the torch version and warn if it isn't
diff --git a/cmake/utils.cmake b/cmake/utils.cmake
index bb222bb437b1d..8ac81bf2e23e3 100644
--- a/cmake/utils.cmake
+++ b/cmake/utils.cmake
@@ -281,7 +281,7 @@ endmacro()
 #  not provided.
 #  COMPILE_FLAGS - Extra compiler flags passed to NVCC/hip.
 #  INCLUDE_DIRECTORIES - Extra include directories.
-#  LINK_LIBRARIES - Extra link libraries.
+#  LIBRARIES - Extra link libraries.
 #  WITH_SOABI - Generate library with python SOABI suffix name.
 #
 # Note: optimization level/debug info is set via cmake build type.
@@ -327,8 +327,17 @@ function (define_gpu_extension_target GPU_MOD_NAME)
   target_include_directories(${GPU_MOD_NAME} PRIVATE csrc
     ${GPU_INCLUDE_DIRECTORIES})
 
-  target_link_libraries(${GPU_MOD_NAME} PRIVATE ${TORCH_LIBRARIES}
+  target_link_libraries(${GPU_MOD_NAME} PRIVATE torch ${torch_python_LIBRARY}
     ${GPU_LIBRARIES})
 
+  # Don't use `TORCH_LIBRARIES` for CUDA since it pulls in a bunch of
+  # dependencies that are not necessary and may not be installed.
+  if (GPU_LANGUAGE STREQUAL "CUDA")
+    target_link_libraries(${GPU_MOD_NAME} PRIVATE ${CUDA_CUDA_LIB}
+      ${CUDA_LIBRARIES})
+  else()
+    target_link_libraries(${GPU_MOD_NAME} PRIVATE ${TORCH_LIBRARIES})
+  endif()
+
   install(TARGETS ${GPU_MOD_NAME} LIBRARY DESTINATION ${GPU_DESTINATION})
 endfunction()
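
For reference, a minimal sketch of how a caller in CMakeLists.txt might invoke `define_gpu_extension_target` after this change. Only `COMPILE_FLAGS`, `INCLUDE_DIRECTORIES`, `LIBRARIES`, and `WITH_SOABI` are documented in the hunk above; the `DESTINATION`, `LANGUAGE`, `SOURCES`, and `ARCHITECTURES` keywords and the `VLLM_*` / `_C` names are assumptions used purely for illustration.

# Illustrative only: keyword names other than COMPILE_FLAGS,
# INCLUDE_DIRECTORIES, LIBRARIES, and WITH_SOABI are assumed, not taken
# from the diff above.
define_gpu_extension_target(
  _C                                 # extension module name (hypothetical)
  DESTINATION vllm                   # install dir, consumed as GPU_DESTINATION
  LANGUAGE ${VLLM_GPU_LANG}          # "CUDA" or "HIP"; selects the CUDA-only link path
  SOURCES ${VLLM_EXT_SRC}            # assumed list of .cu/.cpp sources
  ARCHITECTURES ${VLLM_GPU_ARCHES}   # assumed GPU architecture list
  COMPILE_FLAGS ${VLLM_GPU_FLAGS}    # extra NVCC/hip flags
  LIBRARIES ${EXTRA_LIBS}            # extra link libraries (GPU_LIBRARIES)
  WITH_SOABI)                        # name the .so with the Python SOABI suffix

With `LANGUAGE` set to `CUDA`, the resulting target links `torch`, `${torch_python_LIBRARY}`, `${CUDA_CUDA_LIB}`, and `${CUDA_LIBRARIES}` rather than the full `${TORCH_LIBRARIES}` list; any other language falls back to `${TORCH_LIBRARIES}`.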