Runtime error when using torch.bernoulli

Hello, I am new to PyTorch and tried to implement a simple RBM following the deeplearning.net tutorial. I have a simple implementation, but it raises a runtime error at the torch.bernoulli call during training. I have no idea why it occurs and found no answer anywhere else. Can someone help me out?

My code:

import os
import torch
from torch.autograd import Variable
import torch.nn.functional as F
import torch.optim as optim
from torchvision import datasets, transforms
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
# hyper-parameters
cd_k = 5            # Gibbs steps per contrastive-divergence update
lr = 0.001
batch_sz = 20
test_btsz = 16
nH = 512            # number of hidden units
nV = 28 * 28        # number of visible units, one per MNIST pixel
v_bias = Variable(torch.zeros(nV), requires_grad=True)
h_bias = Variable(torch.zeros(nH), requires_grad=True)
W = Variable(torch.normal(torch.zeros(nV, nH), 0.01), requires_grad=True)
params = [v_bias, h_bias, W]
solver = optim.Adam(params, lr=lr)
# data sets: MNIST 28x28, train: 60000, test: 10000
train_loader = torch.utils.data.DataLoader(
        datasets.MNIST('Data/data', train=True, download=True,
                       transform=transforms.Compose([
                               transforms.ToTensor()
                               ])),
        batch_size=batch_sz, shuffle=True)
test_loader = torch.utils.data.DataLoader(
        dataset=datasets.MNIST('Data/data', train=False, download=True,  # held-out test split
                               transform=transforms.Compose([
                                       transforms.ToTensor()
                                       ])),
        batch_size=test_btsz, shuffle=True)
def xw_b(x, w, b):
    # x @ w plus the bias row repeated across the batch dimension
    return x @ w + b.repeat(x.size(0), 1)
def free_energy(v):
    xwb = xw_b(v, W, h_bias)
    vbias_term = v @ v_bias
    hidden_term = torch.sum(torch.log(1 + torch.exp(xwb)), dim=1)
    return -hidden_term - vbias_term
def sample_vfh(h):
    # sample the visible units given the hidden units
    wx_b = h @ W.t() + v_bias.repeat(h.size(0), 1)
    prob = F.sigmoid(wx_b)
    return torch.bernoulli(prob)
def sample_hfv(v):
    # sample the hidden units given the visible units
    wx_b = v @ W + h_bias.repeat(v.size(0), 1)
    prob = F.sigmoid(wx_b)
    return torch.bernoulli(prob)
def gibbs_chain(x, cd_k):
    # alternate hidden/visible sampling for cd_k steps, starting from the data
    v_ = x
    for _ in range(cd_k):
        h_ = sample_hfv(v_)
        v_ = sample_vfh(h_)
    return v_
def train_rbm_by_batch(x):
    chain_end = gibbs_chain(x, cd_k)
    v_ = Variable(chain_end.data, requires_grad=False)  # do not backprop through the chain
    loss = -torch.mean(free_energy(x) - free_energy(v_))
    solver.zero_grad()
    loss.backward()
    solver.step()
    return loss
def train_rbm():
    for epoch in range(100):
        loss, batch_idx = 0, 0
        for batch_idx, data in enumerate(train_loader, 0):
            inputs, _ = data
            inputs = Variable(inputs.view(batch_sz, 28*28))
            loss = train_rbm_by_batch(inputs)
        print('Epoch-{}; loss: {} '.format(epoch, loss.data.numpy()))
if __name__ == '__main__':
    train_rbm()

Error information:

File "/Users/yx/Documents/ANN/rbm_pytorch.py", line 79, in gibbs_chain
    h_ = sample_hfv(v_)
File "/Users/yx/Documents/ANN/rbm_pytorch.py", line 73, in sample_hfv
    return torch.bernoulli(prob)
File "/Users/yx/anaconda/envs/yx/lib/python3.6/site-packages/torch/autograd/variable.py", line 705, in bernoulli
    return Bernoulli()(self)
File "/Users/yx/anaconda/envs/yx/lib/python3.6/site-packages/torch/autograd/stochastic_function.py", line 23, in _do_forward
    result = super(StochasticFunction, self)._do_forward(*inputs)
File "/Users/yx/anaconda/envs/yx/lib/python3.6/site-packages/torch/autograd/functions/stochastic.py", line 41, in forward
    samples = probs.new().resize_as_(probs).bernoulli_(probs)
RuntimeError: must be >= 0 and <= 1 at /Users/soumith/anaconda/conda-bld/pytorch-0.1.10_1488750756897/work/torch/lib/TH/THRandom.c:270

As the error says, torch.bernoulli expects every entry of its input to lie in [0, 1]. prob is the output of a sigmoid, which can only leave that range by containing NaN (NaN fails both comparisons), so the real problem is upstream: the training itself is diverging. Your loss is the negative of the deeplearning.net cost, which is mean(free_energy(input)) - mean(free_energy(chain_end)); with the sign flipped, Adam climbs the objective instead of descending it, W grows without bound, torch.exp(xwb) overflows inside free_energy, the update writes NaN into the parameters, and on the next Gibbs step sigmoid returns NaN and bernoulli raises. Even with the sign corrected, torch.log(1 + torch.exp(xwb)) overflows once xwb gets large in float32, so it is safer to compute the hidden term with a numerically stable softplus.
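
A minimal sketch of both fixes, reusing the names from the code above (xw_b, gibbs_chain, solver, W, h_bias, v_bias, and cd_k come from your post; F.softplus is the standard torch.nn.functional call):

def free_energy(v):
    xwb = xw_b(v, W, h_bias)
    vbias_term = v @ v_bias
    # F.softplus(x) computes log(1 + exp(x)) without overflowing for large x
    hidden_term = torch.sum(F.softplus(xwb), dim=1)
    return -hidden_term - vbias_term
def train_rbm_by_batch(x):
    chain_end = gibbs_chain(x, cd_k)
    v_ = Variable(chain_end.data, requires_grad=False)  # keep the chain out of the graph
    # deeplearning.net cost: free energy of the data minus free energy of the model sample
    loss = torch.mean(free_energy(x)) - torch.mean(free_energy(v_))
    solver.zero_grad()
    loss.backward()
    solver.step()
    return loss

With the corrected sign the parameters follow the CD-k gradient, and softplus keeps the hidden term finite, so the probabilities handed to torch.bernoulli stay in [0, 1]. While debugging, an assert such as assert (prob.data == prob.data).all() right before the torch.bernoulli call will flag the first batch that goes bad, since NaN is the only value not equal to itself.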