Got “Trying to backward through the graph a second time”

Hi,

I’ve seen that other people hit this error with loop-like operations, and that setting retain_graph=True can work around it, but I’m still confused about how I’m getting it here.
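For reference, this is the kind of workaround I’ve seen suggested (just a toy example, not my code): calling backward() twice on the same graph fails unless the first call passes retain_graph=True:

import torch

x = torch.ones(3, requires_grad=True)
y = (x * 2).sum()

y.backward(retain_graph=True)  # keep the graph alive after the first backward
y.backward()                   # OK; without retain_graph=True above, this second call raises the error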

import torch
import torch.nn as nn
import torch.nn.functional as F

class Smoother(nn.Module):
    def __init__(self, in_dim, out_dim, k, device):
        super().__init__()
        assert k % 2 == 1
        # fixed base grid and learnable scalars for the smoothing kernel
        self.base = torch.linspace(-1, 1, steps=k, device=device, requires_grad=False).float()
        self.alpha = torch.ones(1, device=device, requires_grad=True).float()
        self.beta = torch.zeros(1, device=device, requires_grad=True).float()
        self.theta = torch.ones(1, device=device, requires_grad=True).float() / (k / 2)
        self.omega = torch.zeros(1, device=device, requires_grad=True).float()
        # the kernel is built once here, in __init__
        self.weight = self.base * self.alpha + self.beta
        self.weight = torch.sigmoid(self.weight)
        self.weight = self.weight * self.theta + self.omega
        self.weight = self.weight.expand(out_dim, in_dim, k)
        p = (k - 1) // 2
        self.padding = nn.ReplicationPad1d((2 * p, 0))

    def forward(self, x):
        # x: (batch, channels, length)
        x = self.padding(x)
        x = F.conv1d(x, self.weight)
        return x

class Net(nn.Module):
    def __init__(self, device):
        super(Net, self).__init__()
        self.sm = Smoother(in_dim=16, out_dim=16, k=129, device=device)

    def forward(self, x):
        x = self.sm(x)
        return x


if __name__ == '__main__':
    device = torch.device('cuda:0')
    m = Net(device)
    a = torch.rand((3, 16, 512)).float().cuda()
    b = m(a)
    loss = …

I thought I only initialize the weight once, so I don’t see where a second backward through the same graph comes from. Is this caused by the expand operation?
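In case it helps, here is a tiny standalone sketch (not my model, just my understanding of the situation) that gives me the same error message, because the graph from the leaf tensor to the kernel is built only once, outside the loop:

import torch

# leaf parameter and a kernel derived from it once, outside the training loop
alpha = torch.ones(1, requires_grad=True)
base = torch.linspace(-1, 1, steps=5)
weight = torch.sigmoid(base * alpha)   # graph alpha -> weight is built only here

for step in range(2):
    loss = (weight * torch.rand(5)).sum()
    loss.backward()  # second iteration raises "Trying to backward through the graph a second time"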
Thanks!

Best regards