RuntimeError: shape '[64, 19, 415]' is invalid for input of size 61189120

I was trying to recreate someone else's work, so I added a few print statements to find out exactly where the issue occurs.

The problem occurs in the torch.reshape function, in the forward method of the DiffusionGraphConv class, it is located in “eeg-gnn-ssl-main/model/cell.py”

Link to the original github repository can be found in the first cell of the colab notebook
Link to colab notebook for more details = Google Colab
Link to my repository with the changes I made = eeg-gnn-ssl-main - Google Drive


RuntimeError Traceback (most recent call last)
Cell In[16], line 33
30 model = model.to(device)
32 # Train
---> 33 train(model, dataloaders, device)
35 # Load best model after training finished
36 best_path = os.path.join(args.save_dir, 'best.pth.tar')

Cell In[15], line 58, in train(model, dataloaders, device)
55 # Forward
56 # (batch_size, num_classes)
57 if args.model_name == "dcrnn":
---> 58 logits = model(x, seq_lengths, supports)
59 else:
60 raise NotImplementedError

File /opt/conda/lib/python3.10/site-packages/torch/nn/modules/module.py:1518, in Module._wrapped_call_impl(self, *args, **kwargs)
1516 return self._compiled_call_impl(*args, **kwargs) # type: ignore[misc]
1517 else:
---> 1518 return self._call_impl(*args, **kwargs)

File /opt/conda/lib/python3.10/site-packages/torch/nn/modules/module.py:1527, in Module._call_impl(self, *args, **kwargs)
1522 # If we don't have any hooks, we want to skip the rest of the logic in
1523 # this function, and just call forward.
1524 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks
1525 or _global_backward_pre_hooks or _global_backward_hooks
1526 or _global_forward_hooks or _global_forward_pre_hooks):
---> 1527 return forward_call(*args, **kwargs)
1529 try:
1530 result = None

Cell In[14], line 49, in DCRNNModel_classification.forward(self, input_seq, seq_lengths, supports)
44 init_hidden_state = self.encoder.init_hidden(
45 batch_size).to(self._device)
47 # last hidden state of the encoder is the context
48 # (max_seq_len, batch, rnn_units*num_nodes)
---> 49 _, final_hidden = self.encoder(input_seq, init_hidden_state, supports)
50 # (batch_size, max_seq_len, rnn_units*num_nodes)
51 output = torch.transpose(final_hidden, dim0=0, dim1=1)

File /opt/conda/lib/python3.10/site-packages/torch/nn/modules/module.py:1518, in Module._wrapped_call_impl(self, *args, **kwargs)
1516 return self._compiled_call_impl(*args, **kwargs) # type: ignore[misc]
1517 else:
---> 1518 return self._call_impl(*args, **kwargs)

File /opt/conda/lib/python3.10/site-packages/torch/nn/modules/module.py:1527, in Module._call_impl(self, *args, **kwargs)
1522 # If we don't have any hooks, we want to skip the rest of the logic in
1523 # this function, and just call forward.
1524 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks
1525 or _global_backward_pre_hooks or _global_backward_hooks
1526 or _global_forward_hooks or _global_forward_pre_hooks):
---> 1527 return forward_call(*args, **kwargs)
1529 try:
1530 result = None

Cell In[13], line 47, in DCRNNEncoder.forward(self, inputs, initial_hidden_state, supports)
45 output_inner = []
46 for t in range(seq_length):
---> 47 _, hidden_state = self.encoding_cells[i_layer](
48 supports, current_inputs[t, ...], hidden_state)
49 output_inner.append(hidden_state)
50 output_hidden.append(hidden_state)

File /opt/conda/lib/python3.10/site-packages/torch/nn/modules/module.py:1518, in Module._wrapped_call_impl(self, *args, **kwargs)
1516 return self._compiled_call_impl(*args, **kwargs) # type: ignore[misc]
1517 else:
---> 1518 return self._call_impl(*args, **kwargs)

File /opt/conda/lib/python3.10/site-packages/torch/nn/modules/module.py:1527, in Module._call_impl(self, *args, **kwargs)
1522 # If we don't have any hooks, we want to skip the rest of the logic in
1523 # this function, and just call forward.
1524 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks
1525 or _global_backward_pre_hooks or _global_backward_hooks
1526 or _global_forward_hooks or _global_forward_pre_hooks):
---> 1527 return forward_call(*args, **kwargs)
1529 try:
1530 result = None

File /kaggle/input/syi-tang-my-beloved/eeg-gnn-ssl-main/model/cell.py:247, in DCGRUCell.forward(self, supports, inputs, state)
244 else:
245 fn = self._fc
246 value = torch.sigmoid(
---> 247 fn(supports, inputs, state, output_size, bias_start=1.0))
248 value = torch.reshape(value, (-1, self._num_nodes, output_size))
249 r, u = torch.split(
250 value, split_size_or_sections=int(
251 output_size / 2), dim=-1)

File /opt/conda/lib/python3.10/site-packages/torch/nn/modules/module.py:1518, in Module._wrapped_call_impl(self, *args, **kwargs)
1516 return self._compiled_call_impl(*args, **kwargs) # type: ignore[misc]
1517 else:
---> 1518 return self._call_impl(*args, **kwargs)

File /opt/conda/lib/python3.10/site-packages/torch/nn/modules/module.py:1527, in Module._call_impl(self, *args, **kwargs)
1522 # If we don't have any hooks, we want to skip the rest of the logic in
1523 # this function, and just call forward.
1524 if not (self._backward_hooks or self._backward_pre_hooks or self._forward_hooks or self._forward_pre_hooks
1525 or _global_backward_pre_hooks or _global_backward_hooks
1526 or _global_forward_hooks or _global_forward_pre_hooks):
---> 1527 return forward_call(*args, **kwargs)
1529 try:
1530 result = None

File /kaggle/input/syi-tang-my-beloved/eeg-gnn-ssl-main/model/cell.py:151, in DiffusionGraphConv.forward(self, supports, inputs, state, output_size, bias_start)
147 print("Number of matrices:", num_matrices)
149 print("Shape of x before reshaping:", x.shape)
---> 151 x = torch.reshape(
152 x,
153 shape=[
154 batch_size,
155 self._num_nodes,
156 input_size *
157 num_matrices])
158 x = torch.reshape(
159 x,
160 shape=[
(...)
163 input_size *
164 num_matrices])
165 # (batch_size * self._num_nodes, output_size)
165 # (batch_size * self._num_nodes, output_size)

RuntimeError: shape '[64, 19, 415]' is invalid for input of size 61189120

Is anyone willing to help, please?

Your code is neither properly formatted nor executable; properly formatted, runnable code would make debugging much easier.
Based on the stack trace, this reshape fails:


[quote="BabuRao69420, post:1, topic:199299"]
File /kaggle/input/syi-tang-my-beloved/eeg-gnn-ssl-main/model/cell.py:151, in DiffusionGraphConv.forward(self, supports, inputs, state, output_size, bias_start)
147 print("Number of matrices:", num_matrices)
149 print("Shape of x before reshaping:", x.shape)
---> 151 x = torch.reshape(
152 x,
153 shape=[
154 batch_size,
155 self._num_nodes,
156 input_size *
157 num_matrices])
[/quote]
but it's unclear what this line is supposed to do without further information.