It’s really weird for the following small code snippet to produce different results for np.linalg.norm and torch.norm.
import torch
from torch import nn
from torch.nn import functional as F
import numpy as np
def main(iters=100):
    """Compare torch's Tensor.norm() against np.linalg.norm on conv2d outputs.

    Both calls compute the Frobenius (L2) norm of the whole tensor, so the two
    printed values should agree up to float32 precision.  A large, consistent
    discrepancy (as in the reported output) indicates a bug in the installed
    torch build, not in this script.

    Args:
        iters: number of random inputs to run through the check
            (default 100, matching the original script).
    """
    out_channels = 16
    kernelsz = 5
    stride = 1
    padding = 0
    # Fixed seed so the weights (and the printed norms) are reproducible.
    torch.manual_seed(22)
    # Random conv filter and bias; no autograd needed for this comparison.
    w = torch.randn(out_channels, 3, kernelsz, kernelsz, requires_grad=False)
    b = torch.randn(out_channels, requires_grad=False)
    for i in range(iters):
        x = torch.randn(4, 3, 28, 28)
        out1 = F.conv2d(x, w, b, stride=stride, padding=padding)
        # Tensor.norm() defaults to the Frobenius norm, which is exactly what
        # np.linalg.norm computes for the flattened ndarray.
        print(i, out1.norm().item(), np.linalg.norm(out1.detach().numpy()))


if __name__ == '__main__':
    main()
It outputs:
o@m:~/arc/$ python myF.py
0 2070.43212890625 1630.2097
1 2105.404541015625 1656.4425
2 2107.125244140625 1656.174
3 2130.1220703125 1678.5837
4 2120.11669921875 1662.5927
5 2096.589599609375 1650.8307
6 2068.27001953125 1625.7941
7 2120.5830078125 1667.673
8 2090.04443359375 1640.3636
9 2107.760986328125 1655.7458
10 2107.1748046875 1660.1805
...
Can anyone help me dig out the bug? Thank you.