Cyclical learning rate scheduler

I am trying to make an _LRScheduler object for a cyclical learning rate with warm restarts. I have written the code and checked that it does what it is supposed to, but I am having difficulty turning it into an _LRScheduler object. The errors I get when I run

model = Mnist_Logistic()
opt = optim.SGD(model.parameters(), lr=lr)
cyclical = Cyc_lr(optimizer=opt, lr_max=0.5, lr_min=0.01, datasize=700, bs=64, epochs=1)

model, opt = get_model()
loss_func(model(xb), yb)

are:
AttributeError: 'Cyc_lr' object has no attribute 'max_lr', even though self.max_lr is defined in __init__ and taken as an input in this case

AttributeError: 'cyc_lr' object has no attribute 'optimizer', even though the optimizer object has been passed in

I have added a last_epoch=-1 argument because the base class wanted it. Can someone tell me how to create this object properly? I don't see what I am doing wrong.

I looked at the scheduler classes in the PyTorch docs and followed their pattern.
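
As far as I can tell from that source, the base class does roughly the following when it is constructed. This is my own paraphrase of the linked lr_scheduler.py, not the actual code, so I may have misread it:

# rough paraphrase of _LRScheduler from the linked source (not the real code)
class _LRScheduler(object):
    def __init__(self, optimizer, last_epoch=-1):
        self.optimizer = optimizer
        self.base_lrs = list(map(lambda group: group['initial_lr'], optimizer.param_groups))
        self.step(last_epoch + 1)   # already calls get_lr() once during construction
        self.last_epoch = last_epoch

    def step(self, epoch=None):
        if epoch is None:
            epoch = self.last_epoch + 1
        self.last_epoch = epoch
        # expects get_lr() to return one learning rate per param group
        for param_group, lr in zip(self.optimizer.param_groups, self.get_lr()):
            param_group['lr'] = lr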

Here is the cyclical LR class (get_lr is a bit hairy, but what required pieces is this class missing?)



import numpy as np
from torch.optim.lr_scheduler import _LRScheduler

class Cyc_lr(_LRScheduler):

    def __init__(self, optimizer, lr_max, lr_min, datasize, bs, epochs, last_epoch=-1):
        """Cyclical learning rate with warm restarts.

        Args:
            lr_max: maximum learning rate
            lr_min: minimum learning rate
            datasize: total number of datapoints in the train dataset (excluding val/test)
            bs: batch size trained on
            epochs: number of epochs before the learning rate resets
        """
        super(Cyc_lr, self).__init__(optimizer, last_epoch)

        self.optimizer = optimizer
        self.n = 0
        self.last_epoch = -1
        self.batches = int(datasize / bs)     # batches per epoch; one batch is one lr step
        self.points = epochs * self.batches   # how many lr steps to go from max to min

        self.min_lr = lr_min
        self.max_lr = lr_max
        if 'self.lr' not in locals(): self.lr = self.max_lr   # create self.lr initially as the max lr

        self.miny = self.min_lr / self.max_lr   # ratio of min to max; its arccos tells where to stop x in the cos function
        self.stopx = np.arccos(self.miny)       # so that cos points are only created in the required range

        self.x = np.linspace(0, self.stopx + 0.5, self.points)
        self.y = 0.5 * np.cos(self.x) + 0.5

        self.maxn = len(self.y) - 1   # length of the cos list

    def get_lr(self):
        self.lr = self.max_lr * self.y[self.n]   # scale the max lr by the decaying cos ratio
        self.n += 1

        if self.lr < self.min_lr:    # in case something goes wrong
            self.n = 0
            self.lr = self.max_lr
            self.points = 2 * self.points
            self.x = np.linspace(0, self.stopx + 0.5, self.points)
            self.y = 0.5 * np.cos(self.x) + 0.5

        if self.n == self.maxn:      # warm restart: jump back to the max lr
            self.n = 0
            self.lr = self.max_lr
            self.points = int(1.5 * self.points)   # 1.5x more points so reaching the min takes longer
            self.x = np.linspace(0, self.stopx + 0.5, self.points)
            self.y = 0.5 * np.cos(self.x) + 0.5
            self.maxn = len(self.y) - 1

        return self.lr
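
For context, the way I intend to use it is to call step() once per mini-batch, since one batch is one lr step in the class above. Roughly like this (train_dl is just a stand-in name for my DataLoader; get_model and loss_func are from my setup):

model, opt = get_model()
scheduler = Cyc_lr(optimizer=opt, lr_max=0.5, lr_min=0.01, datasize=700, bs=64, epochs=1)

for epoch in range(10):
    for xb, yb in train_dl:
        loss = loss_func(model(xb), yb)
        loss.backward()
        opt.step()
        opt.zero_grad()
        scheduler.step()   # advance the cyclical lr by one batch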

I tried mimicking LambdaLR: https://pytorch.org/docs/stable/_modules/torch/optim/lr_scheduler.html#LambdaLR
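
For comparison, LambdaLR's get_lr in that file looks roughly like this (copied from memory, so possibly not exact):

def get_lr(self):
    return [base_lr * lmbda(self.last_epoch)
            for lmbda, base_lr in zip(self.lr_lambdas, self.base_lrs)]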

I have now moved super(Cyc_lr, self).__init__(optimizer, last_epoch) to the bottom of the __init__ block and am now getting:



<ipython-input-136-f82e03f14fb9> in __init__(self, optimizer, lr_max, lr_min, datasize, bs, epochs, last_epoch)
     47 
     48         self.maxn=len(self.y)-1   # length of cos list
---> 49         super(Cyc_lr, self).__init__(optimizer, last_epoch)
     50     def get_lr(self):
     51         if 'self.max_lr' not in locals(): self.max_lr=0.1

/Users/admin/anaconda2/lib/python2.7/site-packages/torch/optim/lr_scheduler.pyc in __init__(self, optimizer, last_epoch)
     22                                    "in param_groups[{}] when resuming an optimizer".format(i))
     23         self.base_lrs = list(map(lambda group: group['initial_lr'], optimizer.param_groups))
---> 24         self.step(last_epoch + 1)
     25         self.last_epoch = last_epoch
     26 

/Users/admin/anaconda2/lib/python2.7/site-packages/torch/optim/lr_scheduler.pyc in step(self, epoch)
     49             epoch = self.last_epoch + 1
     50         self.last_epoch = epoch
---> 51         for param_group, lr in zip(self.optimizer.param_groups, self.get_lr()):
     52             param_group['lr'] = lr
     53 

TypeError: zip argument #2 must support iteration

instead of the "no attribute 'max_lr'" error I was getting before, while the "no attribute 'optimizer'" error persists.
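
My current guess is that get_lr() is supposed to return one learning rate per param group rather than a single float, something like this (untested, and with the restart logic from above omitted):

def get_lr(self):
    self.lr = self.max_lr * self.y[self.n]
    self.n += 1
    return [self.lr for _ in self.base_lrs]

and that my own attributes need to exist before the parent __init__ runs, since that call already triggers step() and get_lr(). But I am not sure this is the intended way to do it, hence the question.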