Why am I getting a size mismatch error?

I am running the model below and I can't see why it raises a size mismatch:

import torch
import torch.nn as nn
import torch.nn.functional as F

class Model(nn.Module):
    outputs = {'layer1': [], 'layer2': [], 'layer3': [], 'output': []}  # For experimentation

    def __init__(self, input, hidden, output):
        super(Model, self).__init__()
        self.linear1 = nn.Linear(input, hidden)
        self.linear2 = nn.Linear(hidden, hidden)
        self.linear3 = nn.Linear(hidden, output)

    def forward(self, x):
        out = F.relu(self.linear1(x))
        self.outputs['layer1'].append(out)
        out = F.relu(self.linear2(x))
        self.outputs['layer2'].append(out)
        out = self.linear3(out)
        self.outputs['output'].append(out)
        return out


x = torch.randn(50000, 20)  # 50,000 samples with 20 features each
model = Model(input=20, hidden=100, output=2)
model(x)  # RuntimeError: size mismatch, 50000x20 and 100x100

You should feed the output of the first linear layer into the second one:

out = F.relu(self.linear1(x))
self.outputs['layer1'].append(out)
out = F.relu(self.linear2(out))

The second layer expects an input of * x 100, but you are feeding it * x 20, because the raw x is being passed to linear2 instead of the first layer's output.
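
For completeness, here is a minimal runnable sketch with that one-line fix applied. The outputs bookkeeping is omitted for brevity, and x is assumed to be a 50000 x 20 float tensor, matching the shapes in the error message:

import torch
import torch.nn as nn
import torch.nn.functional as F

class Model(nn.Module):
    def __init__(self, input, hidden, output):
        super(Model, self).__init__()
        self.linear1 = nn.Linear(input, hidden)
        self.linear2 = nn.Linear(hidden, hidden)
        self.linear3 = nn.Linear(hidden, output)

    def forward(self, x):
        out = F.relu(self.linear1(x))    # * x 20  -> * x 100
        out = F.relu(self.linear2(out))  # * x 100 -> * x 100 (fixed: out, not x)
        out = self.linear3(out)          # * x 100 -> * x 2
        return out

model = Model(input=20, hidden=100, output=2)
x = torch.randn(50000, 20)  # assumed input shape, taken from the error message
print(model(x).shape)       # torch.Size([50000, 2])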