TypeError: 'Tensor' object is not callable

Hello, everyone.
Newbie here, trying to learn PyTorch. I'm currently using an LSTM/GRU to do failure prediction. It's a binary classification problem, so I use BCEWithLogitsLoss as the loss function, but the training loop fails with the error below.

%matplotlib notebook
import pandas as pd
import os
import numpy as np
import matplotlib.pyplot as plt
from sklearn.preprocessing import MinMaxScaler
import torch
from torch import nn

data = pd.read_csv('D:/A_PHM_Data/data/train/002/00a22713-68d5-372a-a009-b948ce453442.csv', header=0)
data = data.iloc[:, :-3]
value = data.values.astype(float)

train_x = value[:300].reshape(1, -1, 72)
train_y = np.zeros((train_x.shape[1],1))
train_x = torch.from_numpy(train_x)
train_y = torch.from_numpy(train_y)

class LSTM(nn.Module):
    def __init__(self):
        super(LSTM, self).__init__()
        self.lstm = nn.LSTM(input_size=72, hidden_size=100, batch_first=True)
        self.out = nn.Linear(100, 1)
    
    def forward(self, x, h_state, c_state):
        r_out, (h_state, c_state) = self.lstm(x, (h_state, c_state))
        output = self.out(r_out)
#         output = torch.sigmoid(output)
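        # note: leaving sigmoid out here is fine, since BCEWithLogitsLoss applies it internally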
        return output
    
    def InitHidden(self):
        h_state = torch.zeros(1, 1, 100)
        c_state = torch.zeros(1, 1, 100)
        return h_state, c_state


device = torch.device('cuda')
model = LSTM().to(device)
optimizer = torch.optim.Adam(model.parameters(), lr=0.001)
loss = torch.nn.BCEWithLogitsLoss()
h_state, c_state = model.InitHidden()
h_state, c_state = h_state.to(device), c_state.to(device)
train_x = train_x.float().to(device)
train_y = train_y.float().to(device)
test_x = test_x.to(device)
test_y = test_y.to(device)

model.train()
for epoch in range(1000):
    output = model(train_x, h_state, c_state).squeeze(0)
    loss = loss(output, train_y)
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()

TypeError                                 Traceback (most recent call last)
in
      2 for epoch in range(1000):
      3     output = model(train_x, h_state, c_state).squeeze(0)
----> 4     loss = loss(output, train_y)
      5     optimizer.zero_grad()
      6     loss.backward()

TypeError: 'Tensor' object is not callable

You are overwriting the loss function loss with the loss value in:

loss = loss(output, train_y)

Use criterion = nn.BCEWithLogitsLoss() or use another name for the loss value and it should work.
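For reference, here is a minimal sketch of the corrected setup and training loop, reusing the model, data, and device code from your post; the only real change is that the loss module is bound to criterion, so the computed loss value no longer shadows it:

criterion = torch.nn.BCEWithLogitsLoss()   # loss *module*, kept under its own name
optimizer = torch.optim.Adam(model.parameters(), lr=0.001)

model.train()
for epoch in range(1000):
    output = model(train_x, h_state, c_state).squeeze(0)  # raw logits, shape (seq_len, 1)
    loss = criterion(output, train_y)  # 'loss' is just a tensor; 'criterion' stays callable
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()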


Thanks!! Much appreciated!