Custom convolution layer

Hi,

I decided to return to my problem. I checked everything once again and can confirm that my class MyConv2d works correctly: I compared the output from my layer with the output from torch.nn.Conv2d (with its weights fixed to the same values as my layer's weights, and without bias), and the outputs are equal, but…
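
This is roughly how I did the comparison (a minimal sketch; the shapes and the assumption that MyConv2d keeps its kernels in self.weights are mine):

import torch
import torch.nn as nn

my_conv = MyConv2d(3, 64, 3)                # my custom layer
ref_conv = nn.Conv2d(3, 64, 3, bias=False)  # reference layer without bias
with torch.no_grad():
    ref_conv.weight.copy_(my_conv.weights)  # copy my fixed weights into the reference layer

x = torch.randn(4, 3, 16, 16)
print(torch.allclose(my_conv(x), ref_conv(x), atol=1e-6))  # True in my tests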

When I created a simple network with my layer (code below), I discovered that the problem is with back-propagation: all the weights in my layer stay fixed and don't change during training. How can I force my layer to be trainable?

I suppose the loops in MyConv2d.forward (code below) are the problem and that PyTorch can't build a proper graph to compute the gradients for back-propagation.

def forward(self, x):
        width = self.calculateNewWidth(x)
        height = self.calculateNewHeight(x)
        windows = self.calculateWindows(x)
        
        result = torch.zeros(
            [x.shape[0] * self.out_channels, width, height], dtype=torch.float32, device=device
        )

        for channel in range(x.shape[1]):
            for i_convNumber in range(self.out_channels):
                xx = torch.matmul(windows[channel], self.weights[i_convNumber][channel])
                xx = xx.view(-1, width, height)
                result[i_convNumber * xx.shape[0] : (i_convNumber + 1) * xx.shape[0]] += xx
                
        result = result.view(x.shape[0], self.out_channels, width, height)
        return result  
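
One way to check whether the loops really break the graph (a sketch; it assumes MyConv2d registers its kernels as an nn.Parameter named self.weights):

layer = MyConv2d(3, 8, 3)
x = torch.randn(2, 3, 10, 10)
out = layer(x)
out.sum().backward()
print(layer.weights.grad)  # None here would mean no gradient reaches the weights at all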

And this is the simple network that uses MyConv2d:

class CnnModel(nn.Module):
    def __init__(self):
        super(CnnModel, self).__init__()
        #self.conv1 = nn.Conv2d(3, 64, 3, bias=False)
        #self.conv2 = nn.Conv2d(64, 32, 3, bias=False)
        
        self.conv1 = MyConv2d(3, 64, 3)
        self.conv2 = MyConv2d(64, 32, 3)

        self.pool = nn.MaxPool2d(2, 2)

        self.fc1 = nn.Linear(32 * 6 * 6, 128)
        self.fc2 = nn.Linear(128, 64)
        self.fc3 = nn.Linear(64, 10)

    def forward(self, x):
        x = self.conv1(x)
        x = F.relu(x)
        x = self.pool(x)

        x = self.conv2(x)
        x = F.relu(x)
        x = self.pool(x)
        
        x = x.view(-1, 32 * 6 * 6)

        x = F.relu(self.fc1(x))
        x = F.relu(self.fc2(x))
        x = self.fc3(x)
        
        return x
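
If it helps, this is the kind of sanity check I'd run on the model itself (again just a sketch using the names from the code above):

model = CnnModel()
for name, p in model.named_parameters():
    print(name, tuple(p.shape), p.requires_grad)
# if the conv1/conv2 weights don't show up here, the optimizer never sees them,
# so they would stay fixed no matter what the graph looks like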