I have no idea why this is happening…

I built this model a couple of days ago and it worked with good training and predictions.

However, I opened the file today because I needed to add some things to the code (unrelated to the model), and now when I train it the loss stays at around 1 no matter how long I train it for — 10, 100, 1000, or 10000 epochs.

I literally have not changed anything.

Anyway here is the relevant code:

```
class FundedDateNN(nn.Module):
    """Two-layer feed-forward regressor: input -> hidden -> single output."""

    def __init__(self, input_size, hidden_size, output_size=1):
        super().__init__()
        self.fc1 = nn.Linear(input_size, hidden_size)
        # FIX: non-linearity between the two Linear layers. Without it,
        # fc2(fc1(x)) is mathematically a single linear map, so the extra
        # layer adds no capacity and the loss plateaus on non-linear targets.
        self.relu = nn.ReLU()
        self.fc2 = nn.Linear(hidden_size, output_size)
        self.init_weights()

    def init_weights(self):
        """Initialize weights Uniform(-0.5, 0.5) and biases to zero."""
        initrange = 0.5
        self.fc1.weight.data.uniform_(-initrange, initrange)
        self.fc1.bias.data.zero_()
        self.fc2.weight.data.uniform_(-initrange, initrange)
        self.fc2.bias.data.zero_()

    def forward(self, x):
        """Forward pass; returns a tensor of shape (..., output_size)."""
        return self.fc2(self.relu(self.fc1(x)))

    def predict(self, x):
        """Inference helper: same as forward, but without gradient tracking."""
        with torch.no_grad():
            return self.forward(x)
```

```
# Model/optimizer setup. `input_size` is defined elsewhere in the file.
hidden_size = 20
model = FundedDateNN(input_size, hidden_size)
# Regression loss; note MSELoss expects prediction and target shapes to match.
criterion = nn.MSELoss()
# Plain SGD, fixed learning rate; no momentum or weight decay.
optimizer = torch.optim.SGD(model.parameters(), lr=0.01)
```

```
def train(model, dataloader, num_epochs):
    """Train `model` on `dataloader` for `num_epochs` epochs.

    Uses the module-level `criterion` and `optimizer`, plots the per-epoch
    mean loss when done, and prints elapsed time.

    Returns:
        (losses, model): list of per-epoch mean losses and the trained model.
    """
    model.train()
    losses = list()
    ts = time.time()
    for epoch in range(num_epochs):
        epoch_losses = list()
        # BUG FIX: iterate the `dataloader` argument. The original iterated
        # the global `trainloader`, silently ignoring whatever loader the
        # caller passed in.
        for X, y in dataloader:
            optimizer.zero_grad()
            out = model(X)
            # squeeze() so prediction shape (batch, 1) matches target (batch,)
            loss = criterion(out.squeeze(), y)
            epoch_losses.append(loss.item())
            loss.backward()
            optimizer.step()
        # Compute the epoch mean once instead of twice.
        mean_loss = np.mean(epoch_losses)
        losses.append(mean_loss)
        print('Epoch: {}, loss: {}'.format(epoch, mean_loss))
    te = time.time()
    fig, ax = plt.subplots()
    ax.plot(range(num_epochs), losses)
    plt.show()
    mins = int((te - ts) / 60)
    secs = int((te - ts) % 60)
    print('Training completed in {} minutes, {} seconds.'.format(mins, secs))
    return losses, model
```

```
# Kick off training; `trainloader` is defined elsewhere in the file.
n_epochs = 100
losses, model = train(model, trainloader, n_epochs)
```

I would really, really appreciate some help with this.