Hi
I am applying the following operation to a 3D cube with dimensions (48x48x48), where each value in the cube is 256-dimensional, so the input cube's dimensions are (48x48x48x256) — just like an RGB image is (h, w, 3), where h is height and w is width.
I am trying to track GPU memory consumption during inference using max memory allocated, measuring at every line. I see that memory increases a lot after the conv3d operation. However, when I apply this operation to an input of (80x80x80x256), the memory consumption is lower than when I apply it to (48x48x48x256), which is counterintuitive — memory consumption on the higher-resolution input should be higher.
Max memory consumption on the 48-resolution input is 9 GB, while on the 80-resolution input it is 6 GB. If anybody knows a potential reason, please describe it.
class Op(nn.Module):
    """Two Conv3d -> BatchNorm3d -> LeakyReLU stages over a voxel grid.

    Expects input shaped (N, C, D, H, W). Spatial size is preserved because
    each conv uses stride=1 with padding=kernel_size // 2 (assumes an odd
    kernel size — TODO confirm with the caller).
    """

    def __init__(self, in_channels, out_channels, kernel_size):
        super().__init__()
        self.in_channels = in_channels
        self.out_channels = out_channels
        voxel_layers = [
            nn.Conv3d(in_channels, out_channels, kernel_size,
                      stride=1, padding=kernel_size // 2),
            nn.BatchNorm3d(out_channels, eps=1e-4),
            nn.LeakyReLU(0.1, True),
            nn.Conv3d(out_channels, out_channels, kernel_size,
                      stride=1, padding=kernel_size // 2),
            nn.BatchNorm3d(out_channels, eps=1e-4),
            nn.LeakyReLU(0.1, True),
        ]
        self.voxel_layers = nn.Sequential(*voxel_layers)

    def forward(self, inputs):
        """Apply the layer stack once, logging peak GPU memory per layer.

        Fixes vs. the original:
        - The original ran the whole Sequential first (on ``inputs``) and then
          applied every layer AGAIN to that output: double compute and memory,
          and a channel mismatch at the second pass through layer 0 whenever
          in_channels != out_channels. Each layer now runs exactly once.
        - The original never returned the result; the features are returned.
        - Peak-memory stats were reset only AFTER the first print, so the
          first reading included every allocation since process start. Stats
          are now reset BEFORE each layer so each reading covers that layer
          only. Note CUDA kernels are asynchronous, so we synchronize before
          reading the counter.
        """
        voxel_features = inputs
        track = torch.cuda.is_available()
        for idx, layer in enumerate(self.voxel_layers):
            if track:
                torch.cuda.reset_peak_memory_stats()
            voxel_features = layer(voxel_features)
            if track:
                torch.cuda.synchronize()
                print("############### MEMORY CONSUMPTION, layer",
                      idx, torch.cuda.max_memory_allocated())
                # Return cached blocks so the next layer's peak is not
                # inflated by this layer's freed activations.
                torch.cuda.empty_cache()
        return voxel_features
Thanks