Custom Full Gated Convolutional Layer

class FullGatedConv2D(Conv2D):
    """Gated Convolutional Class"""

    def __init__(self, filters, **kwargs):
        super(FullGatedConv2D, self).__init__(filters=filters * 2, **kwargs)
        self.nb_filters = filters

    def call(self, inputs):
        """Apply gated convolution"""

        output = super(FullGatedConv2D, self).call(inputs)
        linear = Activation("linear")(output[:, :, :, :self.nb_filters])
        sigmoid = Activation("sigmoid")(output[:, :, :, self.nb_filters:])

        return Multiply()([linear, sigmoid])

    def compute_output_shape(self, input_shape):
        """Compute shape of layer output"""

        output_shape = super(FullGatedConv2D, self).compute_output_shape(input_shape)
        return tuple(output_shape[:3]) + (self.nb_filters * 2,)

    def get_config(self):
        """Return the config of the layer"""

        config = super(FullGatedConv2D, self).get_config()
        config['nb_filters'] = self.nb_filters
        del config['filters']
        return config

I have the above Keras implementation of Custom Gated Convolutional Layer. I want to use similar for my existing pytorch implementation for which i have tried following at stuck-

class gatedConv(Module):
     def __init__(self,filters):
         super(gatedConv,self).__init__()

         self.filters=filters*2

     def forward(self,inputs):
         output = super(gatedConv, self).call(inputs)
         linear=torch.nn.Linear(output[:, :, :, :self.filters])
         m=torch.nn.Sigmoid()
         sigmoid=m(output[:, :, :, :self.filters])
         return linear*sigmoid

But Linear activation is not nn. Linear so should i need to custom implement it and how should i inherit Conv2D in pytorch is it similar to keras . Anyone please let me know the path forward. Thankng in advance

You would need to initialize the nn.Linear layer before using it.
Take a look at this tutorial which shows simple model definitions. In particular you will see that layers are initialized in the __init__ method and used in the forward.