Hi,

I guess the simplest approach would be to create a custom version of the Linear layer that uses your Dropout layer. Something like:
# Note: I did not test this code, it might contain typos! :)
import torch.nn as nn
import torch.nn.functional as F

class MyLinear(nn.Linear):
    def __init__(self, in_feats, out_feats, drop_p, bias=True):
        super(MyLinear, self).__init__(in_feats, out_feats, bias=bias)
        # Your custom Dropout layer, used here to mask the weights
        self.masker = Dropout(p=drop_p)

    def forward(self, input):
        # Apply dropout to the weight matrix instead of the activations
        masked_weight = self.masker(self.weight)
        return F.linear(input, masked_weight, self.bias)
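
You should then be able to use it as a drop-in replacement for nn.Linear. A quick (untested) usage sketch, where the sizes and drop_p are just placeholder values:

import torch

layer = MyLinear(in_feats=128, out_feats=64, drop_p=0.5)
x = torch.randn(32, 128)          # batch of 32 inputs
out = layer(x)                    # weights are masked while in training mode
layer.eval()                      # in eval mode, Dropout should be a no-op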