When SPP is invoked, the system reports errors:

code:

```
import torch
import math
import torch.nn.functional as F
def spatial_pyramid_pool(previous_conv, num_sample, previous_conv_size, out_pool_size):
for i in range(len(out_pool_size)):
kernel_size = (math.ceil(previous_conv_size[0] / out_pool_size[i]), math.ceil(previous_conv_size[1] / out_pool_size[i]))
stride = (math.ceil(previous_conv_size[0] / out_pool_size[i]), math.ceil(previous_conv_size[1] / out_pool_size[i]))
pooling = (
math.floor((kernel_size[0] * out_pool_size[i] - previous_conv_size[0] + 1) / 2), math.floor((kernel_size[1] * out_pool_size[i] - previous_conv_size[1] + 1) / 2))
x = (F.max_pool2d(previous_conv, kernel_size=kernel_size, stride=stride, padding=pooling)).view(num_sample, -1)
if(i == 0):
spp = x.view(num_sample, -1)
# nn.MaxPool2d.view() 已经试过，不对
# print("spp size:",spp.size())
else:
# print("size:",spp.size())
spp = torch.cat((spp, x.view(num_sample, -1)), 1)
return spp
errors:
File "C:/Users/15044/pycharmProjects/cell/CNN_SPP/recognition.py", line 161, in <module>
logits = inference(x, False, regularizer)
File "C:/Users/15044/pycharmProjects/cell/CNN_SPP/recognition.py", line 126, in inference
spp = spatial_pyramid_pool(relu4, 63, [256, 256], [1, 4])
File "C:\Users\15044\pycharmProjects\cell\CNN_SPP\spp_layer.py", line 38, in spatial_pyramid_pool
x = (F.max_pool2d(previous_conv, kernel_size=kernel_size, stride=stride, padding=pooling)).view(num_sample, -1)
File "D:\Users\15044\Anaconda3\lib\site-packages\torch\_jit_internal.py", line 132, in fn
return if_false(*args, **kwargs)
File "D:\Users\15044\Anaconda3\lib\site-packages\torch\nn\functional.py", line 425, in _max_pool2d
input, kernel_size, stride, padding, dilation, ceil_mode)[0]
File "D:\Users\15044\Anaconda3\lib\site-packages\torch\nn\functional.py", line 417, in max_pool2d_with_indices
return torch._C._nn.max_pool2d_with_indices(input, kernel_size, _stride, padding, dilation, ceil_mode)
TypeError: max_pool2d_with_indices(): argument 'input' (position 1) must be Tensor, not Tensor
```