With the same layer, the same initialization method, and the same seed, I run two pieces of code independently but get different weights — really confusing.

```
import random
import os
import numpy as np
import torch
import torch.nn as nn  # NOTE(review): original snippet used `nn` without importing it

# Seed every RNG source so the run is reproducible.
seed = 2020
random.seed(seed)
os.environ["PYTHONHASHSEED"] = str(seed)
np.random.seed(seed)
torch.manual_seed(seed)
if torch.cuda.is_available():
    torch.cuda.manual_seed(seed)
    torch.cuda.manual_seed_all(seed)
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False
torch.backends.cudnn.enabled = True

# Number of encoder stages; the original used `num_layers` without defining it.
# 4 reproduces the 109 -> 33 first layer of the companion snippet,
# since int(109 ** (3 / 4)) == 33.
num_layers = 4


def get_encoder():
    """Build a feed-forward encoder whose widths shrink geometrically.

    Widths follow int(109 ** (k / num_layers)) for k = num_layers-1 .. 1
    (here 109 -> 33 -> 10 -> 3), each Linear followed by Softsign, plus a
    final 1-unit output layer.

    Returns:
        nn.Sequential: the assembled encoder.
    """
    input_dim = 109
    encoder = nn.Sequential()
    input_dims = [input_dim] + [
        int(i)
        for i in np.exp(np.log(input_dim) * np.arange(num_layers - 1, 0, -1) / num_layers)
    ]
    for layer_i, (in_dim, out_dim) in enumerate(zip(input_dims[:-1], input_dims[1:])):
        encoder.add_module("fc_" + str(layer_i), nn.Linear(in_dim, out_dim))
        encoder.add_module("fc_" + str(layer_i) + "_act", nn.Softsign())
    # NOTE(review): the original added two "output_layer" modules to an
    # undefined `model` using an undefined `n_hiddens` (a NameError as
    # written); presumably a single 1-unit head on the encoder was
    # intended — confirm.
    encoder.add_module("output_layer", nn.Linear(input_dims[-1], 1))
    return encoder


model = get_encoder()
# Re-initialize the first layer. NOTE: every nn.Linear constructed above
# already consumed draws from the global RNG, so this kaiming draw starts
# from a different RNG state than a script that builds only one Linear —
# that is why the two snippets print different sums despite identical seeds.
nn.init.kaiming_normal_(model.fc_0.weight)
for p in model.parameters():
    print(p.sum())
    break
# output tensor(-12.7479, grad_fn=<SumBackward0>)
```

```
import random
import os
import numpy as np
import torch
import torch.nn as nn  # NOTE(review): original snippet used `nn` without importing it

# Seed every RNG source so the run is reproducible.
seed = 2020
random.seed(seed)
os.environ["PYTHONHASHSEED"] = str(seed)
np.random.seed(seed)
torch.manual_seed(seed)
if torch.cuda.is_available():
    torch.cuda.manual_seed(seed)
    torch.cuda.manual_seed_all(seed)
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False
torch.backends.cudnn.enabled = True

model = nn.Linear(109, 33)
# Here the kaiming draw happens right after constructing a single Linear,
# so the global RNG is in a different state than in the multi-layer
# snippet (where several Linear constructions consumed draws first) —
# hence the different printed sum despite the identical seed.
nn.init.kaiming_normal_(model.weight)
for n, p in model.named_parameters():
    print(p.sum())
    break
# output tensor(-5.6983, grad_fn=<SumBackward0>)
```