torch::cuda::is_available() returns 0

I compiled pytorch from source code. And I got libc10.so, libc10_cuda.so, libtorch.so, libtorch_gpu.so, libtorch_cuda.so.

When using the torch::cuda::is_available() API to check CUDA, it returned false. I confirmed that 4 GPUs are available on the machine and that torch.cuda.is_available() returns False in Python as well.

Could you please tell me how to fix this? Thanks!

Hi, I think the CUDA path is not set properly. You can fix this using these commands:

export PATH=/usr/local/cuda-11.3/bin/:$PATH

and,

export LD_LIBRARY_PATH=/usr/local/cuda-11.3/lib64:$LD_LIBRARY_PATH

PS: check your actual CUDA installation path, adjust the commands accordingly, and then run them. Thanks!

These environment variables have already been added.

(base) root@b92d948aa67e:/workspace/code/example# echo $PATH
/root/.vscode-server/bin/6261075646f055b99068d3688932416f2346dd3b/bin/remote-cli:/root/anaconda3/bin:/root/anaconda3/condabin:/usr/local/nvidia/bin:/usr/local/cuda/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/root/anaconda3/bin/conda:/root/anaconda3/bin/conda
(base) root@b92d948aa67e:/workspace/code/example# echo $LD_LIBRARY_PATH
/usr/local/nvidia/lib:/usr/local/nvidia/lib64

source code:
#include <torch/script.h>
#include <torch/torch.h>

#include <iostream>
#include <memory>  // NOTE(review): the two bare `#include` lines were presumably <iostream>/<memory>, with the header names eaten by forum markup — confirm against the original file

/// Smoke test for a from-source libtorch build: prints CUDA/cuDNN/GPU
/// availability, then adds two random tensors on the selected device.
/// Returns 0 on success.
int main() {
    // Diagnostic printout: availability flags and visible device count.
    std::cout << "CUDA: " << torch::cuda::is_available() << std::endl;
    std::cout << "CUDNN: " << torch::cuda::cudnn_is_available() << std::endl;
    std::cout << "GPU(s): " << torch::cuda::device_count() << std::endl;

    // Pick the device based on CUDA availability — NOT cudnn_is_available():
    // cuDNN can be missing while plain CUDA works, which would silently
    // force the test onto the CPU and hide the real result.
    auto device = torch::kCPU;
    if (torch::cuda::is_available()) {
        device = torch::kCUDA;
    }

    const int size = 4;

    // Sanity check: element-wise add of two random tensors on `device`.
    auto a = torch::randn({size, 9}).to(device);
    auto b = torch::randn({size, 9}).to(device);
    auto c = a + b;

    std::cout << c << std::endl;

    return 0;
}

CMakeLists.txt
cmake_minimum_required(VERSION 3.0 FATAL_ERROR)

project(main)

# Straight ASCII quotes are required here — the original file used curly
# quotes (“ ”), which CMake treats as ordinary token characters, so the
# paths and flags were silently mis-parsed.
set(CMAKE_INSTALL_RPATH "/workspace/code/mytorch/pytorch/build/lib/")
# Also embed the rpath in the build-tree binary so it runs without
# LD_LIBRARY_PATH tweaks.
set(CMAKE_BUILD_RPATH "/workspace/code/mytorch/pytorch/build/lib/")

# -std=c++14 is redundant with CXX_STANDARD below but kept for safety; -g
# enables debug symbols.
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++14 -g")

add_executable(main main.cpp)

target_include_directories(main PUBLIC
    "/workspace/code/pytorch/torch/csrc/api/include"
    "/workspace/code/pytorch/torch/include"
)

# libtorch_cuda.so (and libc10_cuda.so) MUST be on the link line even though
# main.cpp references no symbol from them directly: they register the CUDA
# backend via static initializers. Because nothing references them, linkers
# running with --as-needed drop them, and torch::cuda::is_available() then
# returns 0 at runtime — which is the symptom reported in this thread.
# -Wl,--no-as-needed forces the DT_NEEDED entries to be kept.
target_link_libraries(main
    "-Wl,--no-as-needed"
    "/workspace/code/mytorch/pytorch/build/lib/libc10.so"
    "/workspace/code/mytorch/pytorch/build/lib/libc10_cuda.so"
    "/workspace/code/mytorch/pytorch/build/lib/libtorch.so"
    "/workspace/code/mytorch/pytorch/build/lib/libtorch_cpu.so"
    "/workspace/code/mytorch/pytorch/build/lib/libtorch_cuda.so"
)

set_property(TARGET main PROPERTY CXX_STANDARD 14)