Python is detecting GPU and CUDA but not the C++ API
$ uname -r ; python -c "import torch; print(torch.cuda.is_available())"
5.4.164-1-MANJARO
True
$ make && ./bin/test
[ 50%] Linking CXX executable bin/test
[100%] Built target test
Cuda not available
Following is the CMake configuration
cmake_minimum_required(VERSION 3.21)
project(test VERSION 0.1.0)
enable_language(CXX CUDA)
set(CMAKE_CXX_STANDARD 20)
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_SOURCE_DIR}/bin)
# libtorch settings
if (DEFINED ENV{CMAKE_PREFIX_PATH})
message("-- Setting the cmake prefix path to \"$ENV{CMAKE_PREFIX_PATH}\"")
set(CMAKE_PREFIX_PATH "$ENV{CMAKE_PREFIX_PATH}")
else ()
message("-- Setting default cmake prefix path to \"/opt/libtorch/share/cmake/Torch\"")
set(CMAKE_PREFIX_PATH /opt/libtorch/share/cmake/Torch)
endif ()
# find packages
find_package(Torch REQUIRED QUIET)
add_executable(${PROJECT_NAME} main.cpp)
include_directories(${TORCH_INCLUDE_DIRS})
target_link_libraries(${PROJECT_NAME} ${TORCH_LIBRARIES})
Following is the main.cpp file contents
#include <iostream>
#include <torch/torch.h>
#include <torch/cuda.h>
int main() {
if (torch::cuda::is_available()) {
std::cout << "Cuda available\n";
} else {
std::cout << "Cuda not available\n";
}
}
Downloaded the libtorch from here and extracted in /opt/libtorch