Key error when applying reflect or replicate padding functional


#1

Hello. I am trying to pad in a non-zero way but get a KeyError.
The code:

a = Variable(torch.from_numpy(np.array([[[[1, 2, 3],[4,5,6],[7, 8, 9]]]])))
print(a.data.shape)
b = F.pad(a,(1,1,1,1),mode="replicate")
c = b.unfold(2,3,1).unfold(3,3,1).contiguous ()

The error report:

KeyError                                  Traceback (most recent call last)
<ipython-input-24-9ce839bba2ed> in <module>()
      9 a = Variable(torch.from_numpy(np.array([[[[1, 2, 3],[4,5,6],[7, 8, 9]]]])))
     10 print(a.data.shape)
---> 11 b = F.pad(a,(1,1,1,1),mode="replicate")
     12 c = b.unfold(2,3,1).unfold(3,3,1).contiguous ()
     13 # print(c.data.shape)

/home/chenchr/anaconda3/lib/python3.6/site-packages/torch/nn/functional.py in pad(input, pad, mode, value)
   1033             return _functions.thnn.ReflectionPad2d.apply(input, *pad)
   1034         elif mode == 'replicate':
-> 1035             return _functions.thnn.ReplicationPad2d.apply(input, *pad)
   1036     elif input.dim() == 5:
   1037         assert len(pad) == 6, '5D tensors expect 6 values for padding'

/home/chenchr/anaconda3/lib/python3.6/site-packages/torch/nn/_functions/thnn/auto.py in forward(ctx, input, *params)
    124     @staticmethod
    125     def forward(ctx, input, *params):
--> 126         ctx._backend = type2backend[type(input)]
    127 
    128         ctx.additional_args = []

/home/chenchr/anaconda3/lib/python3.6/site-packages/torch/_thnn/__init__.py in __getitem__(self, name)
     13 
     14     def __getitem__(self, name):
---> 15         return self.backends[name].load()
     16 
     17 

KeyError: <class 'torch.LongTensor'>

When the padding mode is constant, it works.


#2

This line has to be changed to:

a = Variable(torch.from_numpy(np.array([[[[1, 2, 3],[4,5,6],[7, 8, 9]]]])).float())

i.e. the input `a` has to be of type Float or Double. The traceback shows the KeyError is raised when looking up the THNN backend for `torch.LongTensor` — the replicate/reflect padding operations are only implemented for floating-point tensor types, which is why `mode="constant"` (implemented generically) works while `mode="replicate"` fails on an integer tensor.