ValueError: can't optimize a non-leaf Variable

I'm getting the above error; mu and s are my variables. Any fix?

import math

import torch
import torch.optim as optim
from torch.autograd import Variable


class VI_new(object):

    def __init__(self, fn, mu_start, var_start, no_of_samples, n_epoch):
        self.fn = fn
        self.mu_start = mu_start
        # store s with var = log(1 + exp(s)), i.e. a softplus parameterisation
        self.s_start = math.log(math.exp(var_start) - 1)
        self.no_of_samples = no_of_samples
        self.n_epoch = n_epoch

    def vi_approx(self):
        # mu = Variable(torch.FloatTensor([self.mu_start]), requires_grad=True)
        # s = Variable(torch.FloatTensor([self.s_start]), requires_grad=True)

        mu = Variable(torch.randn(1), requires_grad=True)
        s = Variable(torch.randn(1), requires_grad=True)

        mu.data = torch.FloatTensor([self.mu_start])
        s.data = torch.FloatTensor([self.s_start])
        # s.retain_grad()

        # reparameterisation: z = mu + sigma * eps, with eps ~ N(0, 1)
        std_normal = Variable(torch.randn(self.no_of_samples))
        z = mu + torch.sqrt(torch.log(1 + torch.exp(s))) * std_normal

        sum = 0
        for s in z:
            sum += self.fn(s)

        loss1 = -sum / self.no_of_samples
        loss2 = -.5 * torch.log(torch.log(1 + torch.exp(s)))
        loss = loss1 - loss2
        optimizer = optim.Adam([mu, s], lr=.001)

        mu_arr = []
        var_arr = []
        for _ in range(self.n_epoch):
            loss.backward()
            optimizer.step()
            print(loss)
            mu_arr.append(mu.data.numpy()[0])
            var = math.log(1 + math.exp(s.data.numpy()[0]))
            var_arr.append(var)

        return mu_arr, var_arr

The error suggests that s and mu are the results of some calculations, yet I can’t see what is wrong with the code.

What do mu_start and s_start look like? Are they scalar values?

N.B. the code formatting tool works differently if you select multiple lines before using it.
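
One thing worth checking in the posted snippet: the loop for s in z rebinds the name s to an element of z. Since z is the result of a computation on mu and s, its elements are non-leaf Variables, so by the time optim.Adam([mu, s]) is constructed, s no longer refers to the leaf Variable that was created with requires_grad=True, which is exactly the situation this error complains about. Below is a minimal sketch of that pattern and one possible fix; the shapes and the renamed loop variable z_i are just illustrative assumptions, not taken from the original code.

    import torch
    import torch.optim as optim

    s = torch.randn(1, requires_grad=True)   # leaf: created directly by the user
    z = s * torch.randn(5)                   # non-leaf: result of an operation

    for s in z:                              # rebinds the name s to an element of z,
        pass                                 # which is also a non-leaf tensor

    print(s.is_leaf)                         # False
    # optim.Adam([s], lr=1e-3)               # -> ValueError: can't optimize a non-leaf Tensor
    #                                        #    (older releases say "non-leaf Variable")

    # possible fix: use a different loop variable so s keeps pointing at the leaf
    s = torch.randn(1, requires_grad=True)
    z = s * torch.randn(5)
    total = 0
    for z_i in z:
        total = total + z_i
    optimizer = optim.Adam([s], lr=1e-3)     # works: s is still a leaf

If that is indeed the cause, the same renaming would be needed inside vi_approx. Note also that the posted training loop builds loss once and then calls loss.backward() every epoch; typically the loss is recomputed (and optimizer.zero_grad() called) on each iteration, otherwise the second backward pass will fail because the graph has already been freed.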