How to print the computed gradient values for a PyTorch model?

Hello everyone,
I want to see the value of the gradients at each iteration of training, but I have no idea how to access them. Could you help me?

import matplotlib.pyplot as plt
import numpy as np

import torch
import torch.optim as optim
import torch.nn as nn

n=500
d=2
np.random.seed(42)
x=np.random.randn(n,d)
epsilon=np.random.randn(n,1)
b=np.ones([1,1])
w=2*np.ones([1,d])
x0=np.ones([n,1])
y=b+np.dot(x,np.transpose(w))+.1*epsilon
idx=np.arange(n)
np.random.shuffle(idx)
idx_train=idx[:int(.8*n)]
idx_val=idx[int(.8*n):]
x_train,y_train=x[idx_train],y[idx_train]
x_val,y_val=x[idx_val],y[idx_val]

device = 'cuda' if torch.cuda.is_available() else 'cpu'
x_train_tensor = torch.as_tensor(x_train).float().to(device)
y_train_tensor = torch.as_tensor(y_train).float().to(device)
torch.manual_seed(13)
model = nn.Sequential(nn.Linear(d, 1)).to(device)
losses=[]
n_epochs=1
lr=.1
loss_fn = nn.MSELoss(reduction='mean')
optimizer = optim.SGD(model.parameters(), lr=lr)
for epoch in range(n_epochs):
    model.train()
    yhat = model(x_train_tensor)
    loss = loss_fn(yhat, y_train_tensor)
    t=loss.item()
    losses.append(t)
    loss.backward()
    optimizer.step()
    optimizer.zero_grad()

Can you see if this helps? After loss.backward() runs, each parameter's .grad attribute holds the gradient computed for that iteration, and it stays there until optimizer.zero_grad() clears it, so you can print the gradients anywhere between those two calls.
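Here is a minimal sketch of your training loop with the printout added; the only new part is the loop over model.named_parameters(), everything else is your code as written:

for epoch in range(n_epochs):
    model.train()
    yhat = model(x_train_tensor)
    loss = loss_fn(yhat, y_train_tensor)
    losses.append(loss.item())
    loss.backward()
    # gradients are populated now; print them before zero_grad() resets them
    for name, param in model.named_parameters():
        print(epoch, name, param.grad)
    optimizer.step()
    optimizer.zero_grad()

For a larger model the full tensors get verbose, so you could print a summary such as param.grad.norm() instead.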