I am trying to create a custom learnable parameter that acts as a scale factor for resizing a tensor via bilinear interpolation:
# Learnable scalar meant to act as a resize factor, initialised to 0.5.
# NOTE(review): the inner requires_grad=True is redundant — nn.Parameter
# already sets requires_grad=True on the wrapped tensor.
self.scl = nn.Parameter(torch.tensor(0.5, requires_grad=True))
def forward(self, inx):
    """Pointwise conv + LeakyReLU, then build a bilinear resizer whose
    output size is driven by the learnable scalar ``self.scl``.

    NOTE(review): this is why the parameter never updates —
    ``self.scl.item()`` returns a plain Python float, which detaches the
    value from the autograd graph.  ``round()``, ``int()``, and the
    integer ``size`` argument of ``nn.Upsample`` are all
    non-differentiable as well, so no gradient can ever flow back to
    ``self.scl`` through the output size, and the optimizer has nothing
    to apply to it.
    """
    x = self.lrelu(self.part1_PW(inx))
    # Keep a pre-resize copy — presumably for a later skip connection;
    # the rest of the method is not visible here, TODO confirm.
    x0 = x
    #==========
    b, c, h, w = x.shape
    # .item() breaks the graph: from here on scl is an ordinary Python
    # float with no grad history attached to self.scl.
    scl = self.scl.item()
    scl = round(scl, 1)
    #print(scl)
    # Fold the raw value into (0, 1]: 0 -> 1.0; a negative in (-1, 0)
    # is mirrored to its absolute value; <= -1 or > 1 -> 1.0.
    if scl == 0:
        scl = 1.0
    elif scl <0:
        if scl > -1:
            scl = scl*(-1.0)
        else:
            scl = 1.0
    else:
        if scl > 1:
            scl = 1.0
    # int(h * scl) truncates toward zero; an integer output size is
    # itself non-differentiable w.r.t. scl.
    downscale = nn.Upsample(size=(int(h * scl), int(w * scl)), mode='bilinear', align_corners=False)
    # (definition truncated in this view — downscale is constructed but
    # not yet applied to x, and no return statement is visible)
However, when I checked during training, I realized that this parameter's weight isn't updating.
Is the problem caused by the fact that I never applied the parameter directly to the input tensors, so no gradient flows back to it?