I cannot solve the following mystery for hours:
class Convolution(nn.Module):
def __init__(self, args):
super(Convolution, self).__init__()
self.windows = [1,2,3,4,5]
self.window_convolutions = \
nn.ModuleList( [nn.Conv1d(args.cov_dim, args.mem_dim, i) for i in self.windows ])
def forward(self, input, args):
for window in self.windows:
print("input shape", input.shape)
print("params ", args.cov_dim, args.mem_dim, window)
input = input.view(1, input.size()[0], input.size()[1]).transpose(1, 2)
print("input shape", input.shape)
conv_model = self.window_convolutions[window]
convolved = conv_model(input)[0].transpose(0, 1)
print("convolved shape ", convolved.shape)
Gives the following output:
input shape torch.Size([97, 150])
params 150 150 1
input after view torch.Size([1, 150, 97])
convolved shape torch.Size([96, 150])
Notice that convolved shape is one less on zero dim. Why?
Some mystery for me…