Multi-layer LSTM worked in 0.3.0, but doesn't work in 0.3.1

import torch
import torch.autograd as autograd
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim

torch.manual_seed(1)

# Build a 3-layer LSTM: input_size=3, hidden_size=3, num_layers=3.
# The hidden-state tensors (h_0, c_0) must have shape
# (num_layers, batch, hidden_size) == (3, 1, 3). PyTorch 0.3.1 added a
# strict shape check (check_forward_args) that 0.3.0 did not perform,
# so feeding a (3, 1, 3) hidden state to a 1-layer LSTM now raises
# "RuntimeError: Expected hidden[0] size (1, 1, 3), got (3, 1, 3)".
# Constructing the LSTM with num_layers=3 makes the shapes agree.
lstm = nn.LSTM(3, 3, num_layers=3)

# A toy sequence: 5 input vectors of size 3.
inputs = [autograd.Variable(torch.randn(1, 3)) for _ in range(5)]

# Initialize the hidden state: (h_0, c_0), each (num_layers, batch, hidden).
hidden = (autograd.Variable(torch.randn(3, 1, 3)),
          autograd.Variable(torch.randn(3, 1, 3)))

for i in inputs:
    # Step through the sequence one element at a time;
    # after each step, `hidden` contains the updated hidden state.
    out, hidden = lstm(i.view(1, 1, -1), hidden)

0.3.1.post2
Traceback (most recent call last):
File "t7.py", line 21, in <module>
out, hidden = lstm(i.view(3, 1, -1), hidden)
File "C:\ProgramData\Anaconda3\lib\site-packages\torch\nn\modules\module.py", line 357, in __call__
result = self.forward(*input, **kwargs)
File "C:\ProgramData\Anaconda3\lib\site-packages\torch\nn\modules\rnn.py", line 190, in forward
self.check_forward_args(input, hx, batch_sizes)
File "C:\ProgramData\Anaconda3\lib\site-packages\torch\nn\modules\rnn.py", line 158, in check_forward_args
'Expected hidden[0] size {}, got {}')
File "C:\ProgramData\Anaconda3\lib\site-packages\torch\nn\modules\rnn.py", line 154, in check_hidden_size
raise RuntimeError(msg.format(expected_hidden_size, tuple(hx.size())))
RuntimeError: Expected hidden[0] size (1, 1, 3), got (3, 1, 3)