Hi @ptrblck, I have these values:
inpt1 → torch.Size([64, 512])
inpt2 → torch.Size([64, 73728])
inpt3 → torch.Size([64, 65536])
If I run this model:
import torch
import torch.nn as nn
import torch.nn.functional as F

class EnsembleModel(nn.Module):
    def __init__(self, out_size, training=True):
        super().__init__()
        # self.fc1 = nn.Linear(8192, 512)
        self.fc2 = nn.Linear(131072, 512)
        self.fc3 = nn.Linear(82944, 512)
        self.fc4 = nn.Linear(512, out_size)

    def forward(self, inp1, inp2, inp3):
        print(inp1.shape)
        print(inp2.shape)
        print(inp3.shape)
        # out1 = self.fc1(F.dropout(inp1, training=self.training))
        out2 = self.fc2(F.dropout(inp2, training=self.training))
        out3 = self.fc3(F.dropout(inp3, training=self.training))
        out = out1 + out2 + out3
        out = self.fc4(F.dropout(out, training=self.training))
        return out
I get this:
torch.Size([64, 73728])
torch.Size([64, 65536])
RuntimeError: size mismatch, m1: [64 x 73728], m2: [131072 x 512] at /pytorch/aten/src/THC/generic/THCTensorMathBlas.cu:283
I have no clue what is going on here.
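From the error, it looks like the mismatch is between the second dimension of inpt2 (73728) and the in_features of self.fc2 (131072). Below is a minimal sketch of how I assume the layers would need to be sized so the in_features of each linear layer matches the second dimension of the corresponding input; the sizes are just my guess from the shapes above, and out_size=10 is only a placeholder. Is that the right way to think about it?

    import torch
    import torch.nn as nn
    import torch.nn.functional as F

    class EnsembleModel(nn.Module):
        def __init__(self, out_size):
            super().__init__()
            # in_features chosen to match the second dimension of each input
            self.fc1 = nn.Linear(512, 512)      # inpt1: [64, 512]
            self.fc2 = nn.Linear(73728, 512)    # inpt2: [64, 73728]
            self.fc3 = nn.Linear(65536, 512)    # inpt3: [64, 65536]
            self.fc4 = nn.Linear(512, out_size)

        def forward(self, inp1, inp2, inp3):
            out1 = self.fc1(F.dropout(inp1, training=self.training))
            out2 = self.fc2(F.dropout(inp2, training=self.training))
            out3 = self.fc3(F.dropout(inp3, training=self.training))
            out = out1 + out2 + out3  # all three are [64, 512], so the sum works
            return self.fc4(F.dropout(out, training=self.training))

    # quick check with random tensors of the shapes above
    model = EnsembleModel(out_size=10)
    out = model(torch.randn(64, 512), torch.randn(64, 73728), torch.randn(64, 65536))
    print(out.shape)  # expecting torch.Size([64, 10])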