I found a function at https://github.com/pytorch/pytorch/blob/2ed4d65af0a1993c0df7b081f4088d0f3614283e/torch/nn/utils/memory_format.py#L80C6-L80C7
I ran the following test:
def convert():
    """Demonstrate memory-format behavior of ``contiguous`` vs. ``resize_``.

    Prints contiguity flags, shapes, and strides for:
      * ``a`` — a freshly allocated (row-major) tensor,
      * ``b`` — a channels_last copy made via ``contiguous(memory_format=...)``,
      * ``c`` — the result of calling ``resize_`` on ``b`` with its own size.

    NOTE(review): per the ``torch.Tensor.resize_`` documentation, the
    ``memory_format`` argument only takes effect when the requested size
    differs from the tensor's current size — "memory format of self is
    going to be unaffected if ``self.size()`` matches ``sizes``". That is
    why ``c`` keeps exactly the strides ``b`` already had. Confirm against
    the docs for the PyTorch version in use.

    Returns:
        None. This function exists only for its printed output.
    """
    import torch  # local import so this snippet is runnable standalone

    a = torch.randn(1, 2, 3, 4)
    print(a.is_contiguous())  # True
    print(a.is_contiguous(memory_format=torch.contiguous_format))  # True
    print(a.is_contiguous(memory_format=torch.channels_last))  # False
    print(a.shape)  # torch.Size([1, 2, 3, 4])
    print(a.stride())  # (24, 12, 4, 1) — row-major (NCHW) strides

    # contiguous(memory_format=...) is the idiomatic way to restride an
    # existing tensor into channels_last layout.
    b = a.clone().contiguous(memory_format=torch.channels_last)
    print(b.is_contiguous())  # False
    print(b.is_contiguous(memory_format=torch.contiguous_format))  # False
    print(b.is_contiguous(memory_format=torch.channels_last))  # True
    print(b.shape)  # torch.Size([1, 2, 3, 4]) — shape is unchanged
    print(b.stride())  # (24, 1, 8, 2) — channels_last (NHWC-style) strides
    print(b.size())

    # resize_ to the tensor's CURRENT size is a no-op for layout: it neither
    # reallocates nor restrides, so memory_format has no visible effect here.
    c = b.resize_(b.size(), memory_format=torch.channels_last)
    print(c.is_contiguous())  # False
    print(c.is_contiguous(memory_format=torch.contiguous_format))  # False
    print(c.is_contiguous(memory_format=torch.channels_last))  # True
    print(c.shape)  # torch.Size([1, 2, 3, 4])
    print(c.stride())  # (24, 1, 8, 2) — same strides b already had
So what is the purpose of `resize_` here? Since `b` already has the requested size, the call appears to have no effect at all.