import torch import torch.nn as nn import torch.nn.functional as F import torch.nn.utils.rnn class Model(nn.Module): def __init__(self): super(Model, self).__init__() self.lstm = nn.LSTM(batch_first=True, bidirectional=True, input_size=8, hidden_size=252) def forward(self): output = torch.cuda.DoubleTensor(1,3,8) output, (hn, cn) = self.lstm(output) return output model = Model().cuda() output = model() print(output[0, 0, 0])
This code arised an cuda runtime error: ‘an illegal memory access was encountered’. But when I changed the hidden dimentions of LSTM to 251, the error is missing. Can anyone give me a favor? Thanks a lot.