You could iterate over all modules and check each one's type, e.g.:
import torch
import torch.nn as nn
import torchvision.models as models

# Demo: register a forward hook on every Conv2d in a ResNet-18 so that each
# conv layer prints the shape of its output during the forward pass.
model = models.resnet18()

# named_modules() yields every submodule recursively; filter by type to pick
# out only the Conv2d layers.
for name, module in model.named_modules():
    if isinstance(module, nn.Conv2d):
        print(f"registering hook for {name}")
        # Hook signature is (module, input, output); `inp` avoids shadowing
        # the builtin `input`. NOTE(review): these demo hooks are never
        # removed — keep the handles and call .remove() in real code.
        module.register_forward_hook(lambda m, inp, output: print(output.shape))
# registering hook for conv1
# registering hook for layer1.0.conv1
# registering hook for layer1.0.conv2
# registering hook for layer1.1.conv1
# registering hook for layer1.1.conv2
# registering hook for layer2.0.conv1
# registering hook for layer2.0.conv2
# registering hook for layer2.0.downsample.0
# registering hook for layer2.1.conv1
# registering hook for layer2.1.conv2
# registering hook for layer3.0.conv1
# registering hook for layer3.0.conv2
# registering hook for layer3.0.downsample.0
# registering hook for layer3.1.conv1
# registering hook for layer3.1.conv2
# registering hook for layer4.0.conv1
# registering hook for layer4.0.conv2
# registering hook for layer4.0.downsample.0
# registering hook for layer4.1.conv1
# registering hook for layer4.1.conv2

# A single forward pass fires every hook, printing each conv output shape.
x = torch.randn(1, 3, 224, 224)
out = model(x)
# torch.Size([1, 64, 112, 112])
# torch.Size([1, 64, 56, 56])
# torch.Size([1, 64, 56, 56])
# torch.Size([1, 64, 56, 56])
# torch.Size([1, 64, 56, 56])
# torch.Size([1, 128, 28, 28])
# torch.Size([1, 128, 28, 28])
# torch.Size([1, 128, 28, 28])
# torch.Size([1, 128, 28, 28])
# torch.Size([1, 128, 28, 28])
# torch.Size([1, 256, 14, 14])
# torch.Size([1, 256, 14, 14])
# torch.Size([1, 256, 14, 14])
# torch.Size([1, 256, 14, 14])
# torch.Size([1, 256, 14, 14])
# torch.Size([1, 512, 7, 7])
# torch.Size([1, 512, 7, 7])
# torch.Size([1, 512, 7, 7])
# torch.Size([1, 512, 7, 7])
# torch.Size([1, 512, 7, 7])