I am trying to create a PyTorch model with a ReLU activation after every linear layer, but I am getting:
TypeError: torch.nn.functional.relu is not a Module subclass
Model Class :
class Model(nn.Module):
    """Simple MLP: Linear -> ReLU -> Linear -> ReLU -> Linear.

    Note: `nn.Sequential` only accepts `nn.Module` instances. Passing the
    bare function `torch.nn.functional.relu` raises
    "TypeError: ... is not a Module subclass" — use the module form
    `nn.ReLU()` instead (or keep `F.relu` but call it manually in
    `forward`).
    """

    def __init__(self, in_features=4, out_features=3):
        super().__init__()
        self.net = nn.Sequential(
            nn.Linear(in_features, 8),
            nn.ReLU(),          # module instance, not F.relu
            nn.Linear(8, 9),
            nn.ReLU(),
            nn.Linear(9, out_features),
        )

    def forward(self, x):
        """Run the input batch through the stacked layers.

        Args:
            x: tensor of shape (batch, in_features).

        Returns:
            Tensor of shape (batch, out_features).
        """
        return self.net(x)