int main() {
    // Check if CUDA is available
    if (torch::cuda::is_available()) {
        std::cout << "CUDA is available!" << std::endl;
    } else {
        std::cout << "CUDA not available." << std::endl;
    }

    // Create a generic tensor (a 3x3 identity matrix); with default options it is allocated on the host (CPU)
    torch::Tensor tensor = torch::eye(3);
    std::cout << "Identity Matrix:\n" << tensor << std::endl;

    return 0;
}
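As the comment above notes, torch::eye allocates the tensor on the host (CPU) by default. The standalone snippet below is only a sketch of how such a tensor could be moved to the GPU once CUDA is confirmed available; the variable names eye and moved are illustrative and not part of the original program:

#include <torch/torch.h>
#include <iostream>

// Sketch only: move a host tensor to the GPU when CUDA is available.
int main() {
    torch::Device device = torch::cuda::is_available() ? torch::Device(torch::kCUDA)
                                                       : torch::Device(torch::kCPU);
    torch::Tensor eye = torch::eye(3);     // created on the host by default
    torch::Tensor moved = eye.to(device);  // copied to CUDA memory when device is kCUDA
    std::cout << "Tensor device: " << moved.device() << std::endl;
    return 0;
}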
Then edit the CMake configuration with the following contents:
# File name: CMakeLists.txt
# same as https://docs.pytorch.org/cppdocs/installing.html
cmake_minimum_required(VERSION 3.18 FATAL_ERROR)
project(torch-cpp)
# alternatively, we can set the env variable in the shell or pass it via -DCMAKE_PREFIX_PATH
set(CMAKE_PREFIX_PATH "/home/blackcat/program/anaconda3/envs/modelopt/lib/python3.12/site-packages/torch/share/cmake")
# we just need the Torch package
find_package(Torch REQUIRED)