Using linear layers? New user transfering from keras

Well is this code correct? For constructing several layers, with the nonlinear activation functions tanh, sigmoid, softmax?

class Net(nn.Module):
    def init(self):
        super(Net, self).init()
        self.hidden = nn.Linear(784,512)
        self.hidden2 = nn.Linear(512,512)
        self.hidden3 = nn.Linear(512,10)
        self.out = nn.Linear(10,1)
def forward(self, x):
    x = F.tanh(self.hidden(x))
    x = F.dropout(self.hidden(x),0.2)
    x = F.sigmoid(self.hidden(x))
    x = F.dropout(self.hidden(x),0.2)
    x = F.softmax(self.hidden(x))
    x = self.out(x)