Hi,
I have defined my binary classifier as :
class CreditCardTransactionClassifier(nn.Module):
def init(self):
super(CreditCardTransactionClassifier, self).init()
self.fc1 = nn.Linear(29, 29)
self.relu1 = nn.ReLU()
self.dout = nn.Dropout(0.2)
self.fc2 = nn.Linear(29, 58)
self.prelu = nn.PReLU(1)
self.out = nn.Linear(58, 2)
def forward(self, input_):
a1 = self.fc1(input_)
h1 = self.relu1(a1)
dout = self.dout(h1)
a2 = self.fc2(dout)
h2 = self.prelu(a2)
a3 = self.out(h2)
return a3
def predict(self,x):
#Apply softmax to output.
pred = F.softmax(self.forward(x))
ans = []
#Pick the class with maximum weight
for t in pred:
if t[0]>t[1]:
ans.append(0)
else:
ans.append(1)
return torch.tensor(ans)
model = CreditCardTransactionClassifier().double()
My training loop is :
for epoch in range(num_epochs):
h = np.array([])
for data in train_loader:
x = data[:,0:29]
y = data[:,29]
y = y.reshape((-1,1))
#output = model(x)
y_pred = model.predict(x)
y_pred = y_pred.reshape((-1,1))
print(y_pred)
print(y_pred.shape)
print(y.shape)
loss = criterion(y_pred.float(),y.float())
h = np.append(h, loss.item())
optimizer.zero_grad()
loss.backward()
optimizer.step()
mean_loss = np.mean(h)
print('epoch [{}/{}], loss:{:.4f}'
.format(epoch + 1, num_epochs, mean_loss))
history['train_loss'].append(mean_loss)
torch.save(model.state_dict(), ‘./credit_card_model.pth’)
I am getting RuntimeError: element 0 of tensors does not require grad and does not have a grad_fn error
Can somebody please help.
Thx
Vimal