About loss grad

I am a beginner trying to build a network with two linear layers, but I have some problems calculating the loss gradient. Below is my code; these are simple functions, I just want to become familiar with PyTorch.

import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
import torch.optim as optim

class MeinNetz(nn.Module):
    def __init__(self):
        super(MeinNetz, self).__init__()
        self.lin1 = nn.Linear(10, 10)
        self.lin2 = nn.Linear(10, 10)

    def forward(self, x):
        x = F.relu(self.lin1(x))
        x = self.lin2(x)
        return x

    def num_flat_features(self, x):
        size = x.size()[1:]
        num = 1
        for i in size:
            num *= i
        return num

netz = MeinNetz()

for i in range(100):
    x = torch.randn(10, 10)
    x = Variable(x, requires_grad=True)
    inpu = x

    out = netz(inpu)
    out = Variable(out)
    b = torch.rand(10)
    target = Variable(b, requires_grad=True)
    criterion = nn.MSELoss()

    loss = criterion(out, target)

    print(loss)
    print(loss.grad)

    #netz.zero_grad()
    #loss.backward()
    #optimizer = optim.SGD(netz.parameters(), lr=0.5)
    #optimizer.step()

Here is the error report:
loss = criterion(out, target)
  File "/opt/anaconda2/lib/python2.7/site-packages/torch/nn/modules/module.py", line 491, in __call__
    result = self.forward(*input, **kwargs)
  File "/opt/anaconda2/lib/python2.7/site-packages/torch/nn/modules/loss.py", line 371, in forward
    _assert_no_grad(target)
  File "/opt/anaconda2/lib/python2.7/site-packages/torch/nn/modules/loss.py", line 12, in _assert_no_grad
    "nn criterions don't compute the gradient w.r.t. targets - please "
AssertionError: nn criterions don't compute the gradient w.r.t. targets - please mark these tensors as not requiring gradients

In the line target = Variable(b, requires_grad=True), unset requires_grad, i.e., change its value to False (or simply omit it, since False is the default). The nn loss criterions do not compute gradients with respect to targets, so the target must be marked as not requiring gradients.
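For reference, here is a minimal sketch of one corrected training loop. It keeps the Variable API from the code above; the learning rate is illustrative, and the target is given the same shape as the network output (10, 10) so MSELoss compares matching shapes:

import torch
import torch.nn as nn
import torch.optim as optim
from torch.autograd import Variable

netz = MeinNetz()
criterion = nn.MSELoss()
optimizer = optim.SGD(netz.parameters(), lr=0.01)  # learning rate is illustrative

for i in range(100):
    x = Variable(torch.randn(10, 10))       # inputs don't need requires_grad here
    target = Variable(torch.rand(10, 10))   # requires_grad defaults to False

    out = netz(x)                           # no need to re-wrap out in Variable
    loss = criterion(out, target)

    optimizer.zero_grad()                   # clear gradients from the previous step
    loss.backward()                         # populate .grad on the parameters
    optimizer.step()                        # update the weights

    print(loss)
    print(netz.lin1.weight.grad)            # gradients live on the leaf parameters

Also note that print(loss.grad) shows None even after backward(): autograd accumulates gradients on leaf Variables such as the network parameters, not on intermediate results like the loss itself.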

Thank you very much, now it works! :grin: