# Fix for: "ValueError: can't optimize a non-leaf Tensor"
# (raised when torch.optim is given sliced views like B[0] instead of the leaf tensor B)

import torch
from torch.autograd import Variable
import torch.nn.functional as f
import numpy as np
# Learn convex weights over {A, A^2, A^3} such that the trace of the
# weighted combination is minimized by SGD.
A = torch.Tensor([[1, 2, 3], [4, 5, 6], [7, 8, 9]])

# B must remain a *leaf* tensor for the optimizer.  The original code
# wrote softmax(B) back into B element-wise, which turned B into a
# non-leaf tensor and caused "ValueError: can't optimize a non-leaf
# Tensor".  Instead we keep B as raw logits and apply softmax inside
# the loop.  (torch.tensor(..., requires_grad=True) replaces the
# deprecated Variable wrapper with identical behavior.)
B = torch.tensor([0.5, 0.3, 0.2], requires_grad=True)

# The matrix powers do not depend on B, so compute them once outside
# the loop.  torch.mm is the dense matrix multiply; torch.spmm is
# intended for a sparse left operand, which A is not.
A2 = torch.mm(A, A)   # A^2
A3 = torch.mm(A, A2)  # A^3

# Build the optimizer ONCE, on the leaf parameter itself — not inside
# the loop, and not on sliced views such as B[0] (views are non-leaf).
optimizer = torch.optim.SGD([B], lr=0.02)

for i in range(1000):
    w = f.softmax(B, dim=0)            # convex combination weights
    # Original line was missing the `*` multiplication operators.
    C = w[0] * A + w[1] * A2 + w[2] * A3
    loss = C[0][0] + C[1][1] + C[2][2]  # trace of C
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()

# Report the learned softmax weights (not the raw logits).
w = f.softmax(B, dim=0)
print(w[0], w[1], w[2])