I want to print the gradient values before and after doing backpropagation, but I have no idea how to do it.
If I do `loss.grad`, it gives me `None`.
Can I get the gradient for each weight in the model (with respect to that weight)?
Sample code:
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
class Net(nn.Module):
    """Small CNN for 32x32 RGB inputs.

    Three conv stages with average pooling; the final 1x1 conv maps the
    feature maps to 10 class channels, which the last pool reduces to a
    single value each, giving a (batch, 10) logits tensor.
    """

    def __init__(self):
        super().__init__()
        # Stage 1: 3 -> 64 channels; padding=1 keeps the 32x32 spatial size.
        self.conv11 = nn.Conv2d(3, 64, 3, padding=1)
        self.pool1 = nn.AvgPool2d(2, 2)
        # Stage 2: 64 -> 128 channels at 16x16.
        self.conv21 = nn.Conv2d(64, 64 * 2, 3, padding=1)
        self.pool2 = nn.AvgPool2d(2, 2)
        # Head: 1x1 conv to 10 class channels, then pool 8x8 -> 1x1.
        self.conv52 = nn.Conv2d(64 * 2, 10, 1)
        self.pool5 = nn.AvgPool2d(8, 8)

    def forward(self, x):
        """Return (batch, 10) logits for a (batch, 3, 32, 32) input."""
        x = self.pool1(F.relu(self.conv11(x)))
        x = self.pool2(F.relu(self.conv21(x)))
        x = self.pool5(self.conv52(x))
        return x.view(-1, 10)
# --- One training step, with gradient inspection --------------------------
net = Net()
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
print(device)
net.to(device)

inputs = torch.rand(4, 3, 32, 32)
# rand()*10//5 yields 0.0 or 1.0; convert once to long, the dtype
# CrossEntropyLoss expects for class targets.
labels = (torch.rand(4) * 10 // 5).long()

criterion = nn.CrossEntropyLoss()
optimizer = optim.SGD(net.parameters(), lr=0.01, momentum=0.9)

inputs = inputs.to(device)
labels = labels.to(device)

# Clear any stale gradients before accumulating new ones.
optimizer.zero_grad()

outputs = net(inputs)
loss = criterion(outputs, labels)

# `loss` is a NON-leaf tensor, so autograd discards its .grad by default —
# that is why printing loss.grad gave None. retain_grad() keeps it.
loss.retain_grad()

# The gradients the asker wants live on the leaf parameters (the weights),
# as param.grad. Before backward() they are None (or stale).
for name, param in net.named_parameters():
    print(name, "grad before backward:", param.grad)

loss.backward()

print("loss.grad after backward:", loss.grad)  # d(loss)/d(loss) == 1.0
for name, param in net.named_parameters():
    # Print the norm rather than the full tensor to keep output readable.
    print(name, "grad after backward (norm):", param.grad.norm().item())

optimizer.step()