Hello, I am trying to build a convolution-based classifier for some time series data. while running the code I get an error saying -
RuntimeError: size mismatch, m1: [1 x 20592], m2: [20526 x 20658]
The model class is as follows -
class timeSeriesConv(nn.Module):
def __init__(self, channels, seq_length, kernel_size=3, K=2):
super(timeSeriesConv, self).__init__()
self.channels = channels
self.seq_length = seq_length
self.kernel = kernel_size
self.Conv1D = nn.Conv1d(in_channels=self.channels,
out_channels=self.channels,
kernel_size=self.kernel,
stride=1)
self.criterion = nn.CrossEntropyLoss().cuda()
self.depthwiseConv = nn.Conv1d(in_channels=self.channels,
out_channels=K * self.channels,
kernel_size=self.kernel,
stride=1)
self.fc1 = nn.Linear(in_features=K*self.channels * (self.seq_length - 2*(self.kernel-1)), out_features=(K*self.channels*self.seq_length))
self.fc2 = nn.Linear(in_features=(K*self.channels*self.seq_length), out_features=4)
def forward(self, X):
out = nn.functional.elu(X)
out = self.depthwiseConv(out)
out = out.view(out.size(0), -1)
out = nn.functional.elu(out)
out = self.fc1(out)
out = nn.functional.elu(out)
return (self.fc2(out))
where channels = 22, seq_length = 313, K = 3 and kernel_size =2
input shape is [1, 22, 313]
Error is being thrown at the line
out = self.fc1(out)
Please tell me where I am going wrong.
TIA