If you would like to use a custom kernel, you can assign it directly to the
layer's weight attribute (wrapped in nn.Parameter so it stays registered and trainable).
Here is a small example:
class MyModel(nn.Module):
    """Toy model whose first conv layer is initialized from a user-supplied kernel.

    Args:
        kernel: weight tensor for ``conv1`` — must match the layer's weight
            shape, i.e. (out_channels=1, in_channels=1, 3, 3).
    """

    def __init__(self, kernel):
        super().__init__()
        # bias=False so the layer applies exactly the custom kernel,
        # with nothing added on top.
        self.conv1 = nn.Conv2d(1, 1, 3, 1, 1, bias=False)
        # Add other layers here
        # Replace the default random weight with the custom kernel.
        # Wrapping in nn.Parameter keeps it registered (and trainable).
        self.conv1.weight = nn.Parameter(kernel)

    def forward(self, x):
        """Apply the custom convolution; x is expected as (N, 1, H, W)."""
        x = self.conv1(x)
        # pass x to other modules
        return x
# Build a 3x3 single-channel custom kernel and initialize the model with it.
# (Statement order is kept: each torch.randn call advances the RNG state.)
kernel = torch.randn(1, 1, 3, 3)
model = MyModel(kernel)

# Push a dummy 1-sample, 1-channel 24x24 input through the model.
x = torch.randn(1, 1, 24, 24)
output = model(x)