Hi,
I’m new to PyTorch and trying to reimplement FC-DenseNet.
So, I defined the upsampling (transition-up) layer as:
class TransUpBlock(nn.Module):
    """Transition-up block of FC-DenseNet (the PyTorch Tiramisu).

    Upsamples the incoming feature map with a strided transposed
    convolution, crops the result to the spatial size of the skip
    connection, and concatenates both along the channel dimension.
    """

    def __init__(self, num_input_feat, num_output_feat):
        super(TransUpBlock, self).__init__()
        # 3x3 transposed conv with stride 2 roughly doubles H and W;
        # padding=0 makes the output slightly larger than 2x, which is
        # why forward() center-crops before concatenating.
        self.deconv = nn.ConvTranspose2d(
            num_input_feat,
            num_output_feat,
            kernel_size=3,
            stride=2,
            padding=0,
            bias=False,
        )

    def forward(self, input_, skip):
        """Upsample `input_`, crop to `skip`'s H/W, concat on channels."""
        upsampled = self.deconv(input_)
        # center_crop comes from the PyTorch Tiramisu reference code;
        # it trims the upsampled map to skip's spatial dimensions.
        upsampled = center_crop(upsampled, skip.size(2), skip.size(3))
        return torch.cat([upsampled, skip], 1)
where the training code contains:
# Move the model's parameters and buffers onto the default CUDA device.
model = model.cuda()
.
.
.
# Training loop: move each mini-batch to the GPU and run the forward pass.
# NOTE(review): `input` shadows the builtin of the same name — kept here in
# case code elided after this snippet references it; consider renaming.
for i, (input, target) in enumerate(train_loader):
    # `async=True` became a syntax error when `async` was made a reserved
    # word in Python 3.7; `non_blocking=True` is the replacement (torch >= 0.4)
    # and allows an asynchronous host-to-device copy from pinned memory.
    target = target.cuda(non_blocking=True)
    input = input.cuda()
    # NOTE(review): Variable is a no-op wrapper in torch >= 0.4; kept for
    # compatibility with the old torch version this post targets — verify.
    input_var = torch.autograd.Variable(input)
    target_var = torch.autograd.Variable(target)
    output = model(input_var)  # forward pass
When I feed an input batch to the network, I get this error message:
File "My_Train.py", line 271, in train
output = model(input_var) #(A) performs forward pass
File "/usr/lib64/python2.7/site-packages/torch/nn/modules/module.py", line 206, in __call__
result = self.forward(*input, **kwargs)
File "/home/boroujerdi/Dokumente/CVPR_2017_Open_Access_Repository/DenseNet/My_IMP/My_Net.py", line 215, in forward
output_ = self.transUpBlocks[i](output_, skip)
File "/usr/lib64/python2.7/site-packages/torch/nn/modules/module.py", line 206, in __call__
result = self.forward(*input, **kwargs)
File "/home/boroujerdi/Dokumente/CVPR_2017_Open_Access_Repository/DenseNet/My_IMP/My_Net.py", line 92, in forward
output_ = self.deconv(input_)
File "/usr/lib64/python2.7/site-packages/torch/nn/modules/module.py", line 206, in __call__
result = self.forward(*input, **kwargs)
File "/usr/lib64/python2.7/site-packages/torch/nn/modules/conv.py", line 524, in forward
output_padding, self.groups, self.dilation)
File "/usr/lib64/python2.7/site-packages/torch/nn/functional.py", line 137, in conv_transpose2d
return f(input, weight, bias)
RuntimeError: CUDNN_STATUS_BAD_PARAM
Do you think there is something wrong with my cuDNN installation?
Any kind of help will be appreciated.