See how the printed tensors — and even their sums — are not identical across runs:
import numpy as np
import torch

# Dimensions of the random baseline.
# NOTE(review): D and N were undefined in the original snippet (NameError when
# run standalone); the values below are recovered from the recorded output,
# where b1.shape == (7, 12).
D, N = 7, 12

# Create a random baseline: two (D, N) float64 matrices.
# No RNG seed is set, so the printed sums differ on every run — that is the
# "fingerprint" being demonstrated.
b1 = np.random.randn(D, N)
b2 = np.random.randn(D, N)

# Transposed torch views of the same buffers (torch.from_numpy shares memory,
# so .T is a zero-copy (N, D) view). The sums of b1 and b1_t can still differ
# by float rounding, because transposition changes the summation order.
b1_t, b2_t = torch.from_numpy(b1).T, torch.from_numpy(b2).T
assert b1_t.size() == torch.Size([N, D])

print('-- reproducibility finger print')
print(f'{b1.sum()=}')
print(f'{b2.sum()=}')
print(f'{b1_t.sum()=}')
print(f'{b2_t.sum()=}')
print(f'{b1.shape=}')
print(f'{b1_t.shape=}')
output:
-- reproducibility finger print
b1.sum()=8.881784197001252e-16
b2.sum()=1.5543122344752192e-15
b1_t.sum()=tensor(-4.4409e-16, dtype=torch.float64)
b2_t.sum()=tensor(8.8818e-16, dtype=torch.float64)
b1.shape=(7, 12)
b1_t.shape=torch.Size([12, 7])