#include <torch/torch.h>
#include <vector>

int main() {
    std::vector<torch::Tensor> params;
    for (size_t i = 0; i < 3; ++i) {
        // Optimizer parameters normally need requires_grad set.
        params.push_back(torch::randn(10, torch::requires_grad()));
    }
    const double learning_rate = 1e-3; // assumed value; not defined in my original snippet
    auto options = torch::optim::AdamOptions(learning_rate);
    // Construct the optimizer without moving param_groups (base learning rate).
    torch::optim::Adam optimizer(params, options);
    return 0;
}
The program crashes with a bad access error as soon as the torch::optim::Adam constructor runs, so I am unable to create the optimizer object at all.
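In case it helps narrow things down, here is a sketch of the same setup going through the explicit param-group overload of the Adam constructor instead of the tensor-vector one; the OptimizerParamGroup wrapping is my own assumption about the alternative API, not code from the failing build:

#include <torch/torch.h>
#include <vector>

int main() {
    std::vector<torch::Tensor> params;
    for (size_t i = 0; i < 3; ++i) {
        params.push_back(torch::randn(10, torch::requires_grad()));
    }
    // Wrap the tensors in an explicit parameter group rather than passing
    // the raw tensor vector; the group is built in place, so nothing is
    // moved out of a local param_groups variable.
    std::vector<torch::optim::OptimizerParamGroup> groups;
    groups.emplace_back(params); // assumed usage of the param-group overload
    torch::optim::Adam optimizer(groups, torch::optim::AdamOptions(1e-3));
    return 0;
}

If this variant crashes in the same way, then presumably the choice of constructor overload is not the cause.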