Binary Activation Function with PyTorch

Thank you for your point. I rewrote my code according to the example:

import torch.nn as nn
import torch.nn.functional as F


class Net(nn.Module):
    def __init__(self):
        super(Net, self).__init__()
        self.fc1 = nn.Linear(28 * 28, 64)
        self.fc2 = nn.Linear(64, 10)
        nn.init.xavier_normal_(self.fc1.weight)
        nn.init.xavier_normal_(self.fc2.weight)

    def forward(self, x):
        x1 = F.relu(self.fc1(x))
        x_backward = x1
        # threshold the activations to {0, 1} in place
        x1[x1 <= 0] = 0
        x1[x1 > 0] = 1
        x_forward = x1
        # straight-through trick from the example: binary values in the
        # forward pass, gradient routed through x_backward
        y1 = x_backward + (x_forward - x_backward).detach()
        y2 = self.fc2(y1)
        y3 = F.log_softmax(y2, dim=1)
        return y3

But it does not work: it still returns binary values during both the forward and the backward pass.
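My guess is that the in-place indexing also overwrites x_backward, since x_backward and x1 refer to the same tensor, so the backward path ends up seeing the binary values as well. If that is the cause, I think the two paths can be kept separate by thresholding into a new tensor instead of in place; this is only my own assumption about what the example intends, roughly:

def forward(self, x):
    x1 = F.relu(self.fc1(x))
    x_backward = x1
    # build the binary values in a new tensor so that x_backward keeps
    # the real-valued activations
    x_forward = (x1 > 0).float()
    # forward pass uses x_forward, gradients flow through x_backward
    y1 = x_backward + (x_forward - x_backward).detach()
    y2 = self.fc2(y1)
    return F.log_softmax(y2, dim=1)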

I also tried to define a new class in which the forward and backward passes are defined separately.

I wrote it like the code below, but it gives me an error: AttributeError: 'Binary_AF' object has no attribute 'dim'.

Do you think it makes sense to write the code like this? If yes, how can I take care of the error? If not, I would appreciate any suggestions.

class Binary_AF:
    def __init__(self, x):
        self.x = x

    def forward(self):
        self.x[self.x <= 0] = 0
        self.x[self.x > 0] = 1
        return self.x

    def backward(self):
        return self.x


class Net(nn.Module):
    def __init__(self):
        super(Net, self).__init__()
        self.fc1 = nn.Linear(28 * 28, 64)
        self.fc2 = nn.Linear(64, 10)
        nn.init.xavier_normal_(self.fc1.weight)
        nn.init.xavier_normal_(self.fc2.weight)

    def forward(self, x):
        x = F.relu(self.fc1(x))
        y = Binary_AF(x)
        y = self.fc2(y)
        y = F.log_softmax(y, dim=1)
        return y
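One alternative I am considering, based on the PyTorch docs, is to subclass torch.autograd.Function instead of a plain Python class, since that is the documented way to give an operation its own forward and backward behaviour. A sketch of what I have in mind (the name BinarizeSTE and the identity backward are my own assumptions about what Binary_AF was meant to do):

import torch


class BinarizeSTE(torch.autograd.Function):
    @staticmethod
    def forward(ctx, x):
        # forward pass: map positive activations to 1 and the rest to 0
        return (x > 0).float()

    @staticmethod
    def backward(ctx, grad_output):
        # backward pass: pass the incoming gradient through unchanged
        return grad_output

If this is the right pattern, I would call it inside Net.forward as y = BinarizeSTE.apply(x) instead of constructing a Binary_AF object, so that self.fc2 receives a tensor.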