Hello.
I think there is a bug in in-place Bernoulli sampling. Below is code that checks for it: it samples using both the in-place and the out-of-place modes.
import torch
import numpy
# ---- Bernoulli reproducibility check ----
# Draw twice, reseed, draw twice more: each print before the reseed should
# match the corresponding print after it.
print("----BERNOULLI----")

# In-place sampling. NOTE(review): a.bernoulli_() with no argument samples
# with the default probability p=0.5; it does NOT read the tensor's values
# as probabilities.
torch.manual_seed(seed=1)
torch.cuda.manual_seed(seed=1)
a = torch.zeros((10,))
print(a.bernoulli_().numpy())
a = torch.zeros((10,))  # fresh tensor: the in-place draw above mutated `a`
print(a.bernoulli_().numpy())
torch.manual_seed(seed=1)
torch.cuda.manual_seed(seed=1)
a = torch.zeros((10,))
print(a.bernoulli_().numpy())
a = torch.zeros((10,))
print(a.bernoulli_().numpy())
print("--------------------------")

# Out-of-place sampling. NOTE(review): torch.bernoulli(a) uses the values
# of `a` as the per-element probabilities, so with `a` all zeros the output
# is deterministically all zeros — this is a different distribution than the
# in-place p=0.5 draws above, which may explain the apparent mismatch.
# `a` is not mutated here, so it is safe to reuse after reseeding.
torch.manual_seed(seed=1)
torch.cuda.manual_seed(seed=1)
a = torch.zeros((10,))
print(torch.bernoulli(a).numpy())
print(torch.bernoulli(a).numpy())
torch.manual_seed(seed=1)
torch.cuda.manual_seed(seed=1)
print(torch.bernoulli(a).numpy())
print(torch.bernoulli(a).numpy())
# ---- Normal reproducibility check ----
# Same protocol as the Bernoulli section: draw twice, reseed, draw twice
# more; corresponding prints should match.
print("----NORMAL----")

# In-place sampling: normal_() fills the tensor with N(0, 1) draws.
torch.manual_seed(seed=1)
torch.cuda.manual_seed(seed=1)
a = torch.zeros((10,))
print(a.normal_().numpy())
a = torch.zeros((10,))  # fresh tensor: the in-place draw above mutated `a`
print(a.normal_().numpy())
torch.manual_seed(seed=1)
torch.cuda.manual_seed(seed=1)
a = torch.zeros((10,))
print(a.normal_().numpy())
a = torch.zeros((10,))
print(a.normal_().numpy())
print("--------------------------")

# Out-of-place sampling: torch.normal(a) uses `a` as the per-element means
# (std defaults to 1.0) and does not mutate `a`, so it can be reused after
# reseeding without being re-created.
torch.manual_seed(seed=1)
torch.cuda.manual_seed(seed=1)
a = torch.zeros((10,))
print(torch.normal(a).numpy())
print(torch.normal(a).numpy())
torch.manual_seed(seed=1)
torch.cuda.manual_seed(seed=1)
print(torch.normal(a).numpy())
print(torch.normal(a).numpy())