```
class Mask(nn.Module):
def __init__(self):
super(Mask, self).__init__()
self.weight = torch.nn.Parameter(data=torch.Tensor(outC, inC, kernel_size,
kernel_size), requires_grad=True)
stddev = 1.0/math.sqrt(inC*kernel_size*kernel_size)
self.weight.data.uniform(-stddev, stddev)
self.weight = self.weight.contiguous()
self.func()
def func(self):
...
def forward(self, inp):
masked_wt = self.weight.mul(self.mask.cuda())
return torch.nn.functional.Conv2d(inp, masked_wt, stride=self.stride, padding=self.padding)
class Model(nn.Module):
def __init__(self, inC=3, outC=32, kernel_size=1, stride=1, padding=0,
groups=2, bisa=None):
self.Mask = Mask(inC, outC, kernel_size, stride, groups=groups, bias=bias)
def forward(self,x):
x = Mask(x)
return x
```

I Intialised Mask.weight as a nn.Parameter. I’m using this module in a larger network. After training when i print all the parameters using named_parameters() iterator. The custom layer weights are not appearing.