Does not have grad_fn

Hi, I was trying to build a model with callbacks, but in the middle of training I got this error.

Code:

import torch

class DataBunch():
    # Wraps the training/validation DataLoaders; c is the number of target classes.
    def __init__(self, train_dl, valid_dl, c=None):
        self.train_dl = train_dl
        self.valid_dl = valid_dl
        self.c = c
    @property
    def train_ds(self):
        return self.train_dl.ds
    @property
    def valid_ds(self):
        return self.valid_dl.ds


data = DataBunch(train_dl, valid_dl, c)  # train_dl, valid_dl and c are defined earlier in my notebook

def get_model(data, lr=0.01, nh=50):
    # m: number of input features, nh: hidden units, data.c: number of classes
    m = data.train_ds.x.shape[1]
    model = torch.nn.Sequential(torch.nn.Linear(m, nh), torch.nn.ReLU(), torch.nn.Linear(nh, data.c))
    return model, torch.optim.SGD(model.parameters(), lr=lr)

class Learner():
    def __init__(self, model, opt, loss_func, data):
        self.model, self.opt, self.loss_func, self.data = model, opt, loss_func, data

loss_func = torch.nn.functional.cross_entropy  # defined earlier in my notebook; cross-entropy shown here so the snippet runs
learn = Learner(*get_model(data), loss_func, data)

class Callback():
    # Base callbacks: every hook returns False, i.e. "don't interrupt the run".
    def begin_fit(self):
        return False
    def after_fit(self): 
        return False
    def begin_epoch(self):
        return False
    def begin_validate(self): 
        return False
    def after_epoch(self): 
        return False
    def begin_batch(self):
        return False
    def after_loss(self):
        return False
    def after_backward(self):
        return False
    def after_step(self):
        return False
    def after_batch(self):
        return False
    def after_pred(self):
        return False


class Runner():
    stop = False
    in_train = True
    i = 0

    def __init__(self, cb):
        self.cb = cb()

    @property
    def opt(self):       return self.learn.opt

    @property
    def model(self):     return self.learn.model

    @property
    def loss_func(self): return self.learn.loss_func

    @property
    def data(self):      return self.learn.data
    
    def fit(self, epochs, learner):
        self.learn = learner
        self.current_epoch = None
        if self('begin_fit')(): return 'begin_fit'
        for epoch in range(epochs):
            self.current_epoch = epoch
            if self('begin_epoch')(): return 'begin_epoch'
            self.all_batches(self.data.train_dl)

            # validation pass: gradient tracking is disabled for the whole pass
            with torch.no_grad():
                if self('begin_validate')(): return 'validate begin'
                self.all_batches(self.data.valid_dl)
            if self('after_epoch')():
                break
   
    def all_batches(self, dl):
        self.iters = len(dl)
        for x, y in dl:
            if self.stop: break
            self.one_batch(x, y)
            self('after_batch')()
        self.stop = False
    
    def one_batch(self, x, y):
        self.x, self.y = x, y
        if self('begin_batch')(): return
        self.preds = self.model(x)
        if self('after_pred')(): return
        self.loss = self.loss_func(self.preds, self.y)
        if self('after_loss')() and not self.in_train: return
        self.loss.backward()
        self.i += 1
        print(self.loss)
        if self('after_backward')(): return
        with torch.no_grad():
            self.opt.step()
            if self('after_step')(): return
            self.opt.zero_grad()

    def __call__(self, name):
        # look up the callback hook by name; the caller invokes the returned method
        return getattr(self.cb, name)


a = Runner(Callback)
a.fit(1, learn)

The error was:

element 0 of tensors does not require grad and does not have a grad_fn

The error appeared after some mini-batches had already trained; the output looked like this:

tensor(0.0938, grad_fn=<NllLossBackward>)
tensor(0.3338, grad_fn=<NllLossBackward>)
tensor(0.3208, grad_fn=<NllLossBackward>)
tensor(0.0983, grad_fn=<NllLossBackward>)
tensor(0.1380, grad_fn=<NllLossBackward>)

RuntimeError                              Traceback (most recent call last)
<ipython-input> in <module>
      1 a = Runner(Callback)
----> 2 a.fit(1, learn)

RuntimeError: element 0 of tensors does not require grad and does not have a grad_fn
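For reference, the same RuntimeError is easy to reproduce in isolation. This is just a minimal standalone sketch of the failure mode (not my model code): calling backward() on a tensor that was built with gradient tracking disabled.

import torch

w = torch.randn(3, requires_grad=True)
with torch.no_grad():
    loss = (w * 2).sum()   # built under no_grad, so loss has no grad_fn
print(loss.requires_grad)  # False
loss.backward()            # RuntimeError: element 0 of tensors does not require grad ...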

Hi,

If your output does not require gradients, you need to check where in the forward pass it stops requiring them.
You can add print statements in your code that check t.requires_grad on the intermediate tensors to pinpoint the issue.
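For example, something like this (a toy sketch, not your model; the shapes and the no_grad block are only for illustration):

import torch

model = torch.nn.Linear(4, 2)
x = torch.randn(8, 4)

preds = model(x)
print(preds.requires_grad)  # True: preds depends on trainable parameters

with torch.no_grad():
    loss = preds.sum()
print(loss.requires_grad)   # False: gradient tracking stopped here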

Thanks, I figured it out; printing requires_grad was a great way to find it!
I was calling .backward() on a loss whose graph had been detached: my validation pass runs under torch.no_grad(), but one_batch still calls self.loss.backward() there.
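For anyone hitting the same thing, a sketch of the fix in the Runner above: toggle in_train around the two passes and skip the backward/step path for validation batches (the `or` instead of `and` after the loss is the key change).

    def fit(self, epochs, learner):
        self.learn = learner
        if self('begin_fit')(): return 'begin_fit'
        for epoch in range(epochs):
            self.current_epoch = epoch
            if self('begin_epoch')(): return 'begin_epoch'
            self.in_train = True
            self.all_batches(self.data.train_dl)
            with torch.no_grad():
                self.in_train = False
                if self('begin_validate')(): return 'validate begin'
                self.all_batches(self.data.valid_dl)
            if self('after_epoch')(): break

    def one_batch(self, x, y):
        self.x, self.y = x, y
        if self('begin_batch')(): return
        self.preds = self.model(x)
        if self('after_pred')(): return
        self.loss = self.loss_func(self.preds, self.y)
        if self('after_loss')() or not self.in_train: return  # no backward/step on validation batches
        self.loss.backward()
        if self('after_backward')(): return
        with torch.no_grad():
            self.opt.step()
            if self('after_step')(): return
            self.opt.zero_grad()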
