Hi

I am new to PyTorch, and when I tried to use PyTorch in my project I noticed that it somehow always predicts straight lines. I tried to isolate the problem and found that I completely fail to approximate even a simple quadratic function with it. I am very confused about where I am going wrong…

```
import torch
from torch import nn
from torch.autograd import Variable
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
import torch.optim as optim
def f_x(x):
    """Target function to approximate: f(x) = x^2/64 - 5x/4 + 25."""
    quadratic = x * x / 64
    linear = 5 * x / 4
    return quadratic - linear + 25
# Building dataset
def build_dataset():
    """Return the training inputs: x = 30, 31, ..., 50 as a (21, 1) float64 array.

    Vectorized replacement for the original element-by-element fill of an
    ``np.ones((21, 1))`` buffer — same values, same shape, same dtype.
    """
    return np.arange(30, 51, dtype=np.float64).reshape(-1, 1)
# Fixed training grid: x = 30..50, shape (21, 1).
x_values = build_dataset()
# Building nn
# net = nn.Sequential(nn.Linear(1, 100), nn.ReLU(), nn.Linear(100, 100), nn.ReLU(), nn.Linear(100, 1))
# Scalar-in / scalar-out MLP: three hidden layers of 1000 ReLU units.
net = nn.Sequential(nn.Linear(1, 1000), nn.ReLU(), nn.Linear(1000, 1000), nn.ReLU(), nn.Linear(1000, 1000), nn.ReLU(), nn.Linear(1000, 1))
# parameters
# NOTE(review): lr=1e-5 is very small for Adam, and the inputs (30..50) are
# fed in unnormalized — together this plausibly explains the near-linear fit
# after only 200 epochs; consider rescaling x and a larger lr (e.g. 1e-3).
# TODO confirm experimentally.
optimizer = optim.Adam(net.parameters(), lr=0.00001)
epochs = 200
def out(k):
    """Plot net(x) over x in [30, 50] on a fresh figure and show it.

    ``k`` is currently unused (it previously named the exported PDF file).
    Side effects only: opens a matplotlib window via ``show()``.
    """
    a = 30
    b = 50
    from pylab import plot, show, grid, xlabel, ylabel
    import matplotlib.pyplot as plt

    t = np.linspace(a, b, 20)
    x = np.zeros(t.shape[0])
    c_fig = plt.figure()
    # Evaluation only: no_grad() avoids building autograd graphs for every
    # forward pass, and .item() makes the tensor->float conversion explicit.
    with torch.no_grad():
        for j in range(len(t)):
            h = torch.tensor([t[j]], dtype=torch.float32)
            x[j] = net(h).item()
    # NOTE(review): f_x ranges over roughly [0, 1.6] on this interval, so the
    # fixed ylim of [0, 1] clips part of the target — confirm it is intended.
    plt.ylim([0, 1])
    plot(t, x, linewidth=4)
    xlabel('x', fontsize=16)
    ylabel('net(x)', fontsize=16)
    grid(True)
    show()
    plt.close(c_fig)
def train():
    """Train the global ``net`` to fit f_x on the fixed ``x_values`` grid.

    Runs ``epochs - 1`` optimization steps with the global ``optimizer``.

    Returns:
        list[float]: the per-epoch training loss (sum of absolute errors,
        matching the original loss scale).
    """
    net.train()
    losses = []
    # The inputs and targets never change, so build the tensors once instead
    # of per epoch.  torch.from_numpy(...).float() replaces the deprecated
    # torch.autograd.Variable wrapper (tensors are autograd-aware since 0.4).
    x_train = torch.from_numpy(x_values).float()
    y_train = f_x(x_train)
    for epoch in range(1, epochs):
        y_pred = net(x_train)
        # L1 loss summed over the 21 samples.
        loss = torch.sum(torch.abs(y_pred - y_train))
        print("epoch #", epoch)
        print(loss.item())
        losses.append(loss.item())
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()
    return losses
# --- Script entry: train, then plot the loss curve and the fitted function ---
print("training start....")
losses = train()
# One loss value per epoch (epochs 1..epochs-1), so the x range matches.
plt.plot(range(1, epochs), losses)
plt.xlabel("epoch")
plt.ylabel("loss train")
# NOTE(review): early losses are far above 100 with this setup, so this ylim
# clips most of the curve — confirm the clipping is intentional.
plt.ylim([0, 100])
plt.show()
out(epochs)
```