I need someone to help me solve this problem: optim.SGD(model.parameters(), lr=1e-3) always tells me that the optimizer got an empty parameter list.

import numpy as np
import matplotlib.pyplot as plt
import torch as t
import torch.nn as nn
import torch.optim as optim
from torch.autograd import Variable

# training data

x_train = np.array([[3.3], [4.4], [5.5], [6.71], [6.93], [4.168], [9.779], [6.182], [7.59], [2.167], [7.042], [10.791], [5.313], [7.997], [3.1]], dtype=np.float32)
y_train = np.array([[1.7], [2.76], [2.09], [3.19], [1.694], [1.573], [3.366], [2.596], [2.53], [1.221], [2.827], [3.465], [1.65], [2.904], [1.3]], dtype=np.float32)
plt.scatter(x_train, y_train)  # plot the raw data
plt.show()
print(x_train.dtype)


x_train = t.from_numpy(x_train)
y_train = t.from_numpy(y_train)



class LinearRegression(nn.Module):
    def _init_(self):
        super(LinearRegression,self)._init_()
        self.linear = nn.Linear(1, 1)  # input and output are 1-dimensional
        
        
    def forward(self, x):
        out = self.Linear(x)
        return out
    
        
if t.cuda.is_available():
    model = LinearRegression().cuda()
else:
    model = LinearRegression()



criterion = nn.MSELoss()
optimizer = optim.SGD(model.parameters(), lr=1e-3)



num_epochs = 1000

for epoch in range(num_epochs):
    if t.cuda.is_available():
        inputs = Variable(x_train).cuda()
        target = Variable(y_train).cuda()
    else:
        inputs = Variable(x_train)
        target = Variable(y_train)
        
    
    
    
    out = model(inputs)  # forward pass
    loss = criterion(out, target)  # compute the loss
    optimizer.zero_grad()  # zero the gradients
    loss.backward()  # backpropagation
    optimizer.step()  # update the parameters
    
    
    
    
    
    if (epoch + 1) % 20 == 0:
        print('Epoch [{}/{}], loss: {:.6f}'
              .format(epoch + 1, num_epochs, loss.data[0]))


    

Why does it always tell me: optimizer got an empty parameter list?

The definition of __init__ is wrong.
You should use two underscores on each side instead of one :wink: :

def __init__(self):
    super(LinearRegression, self).__init__()
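
Because the method is named _init_ instead of __init__, Python never calls it when the object is constructed (the inherited nn.Module.__init__ runs instead), so self.linear is never created and model.parameters() yields nothing. That is exactly why the optimizer complains about an empty parameter list. While you are at it, note a second typo: forward calls self.Linear(x), but the attribute is named self.linear (attribute names are case-sensitive). A minimal corrected version of your class, keeping everything else the same, would be:

class LinearRegression(nn.Module):
    def __init__(self):
        super(LinearRegression, self).__init__()
        self.linear = nn.Linear(1, 1)  # input and output are 1-dimensional

    def forward(self, x):
        out = self.linear(x)  # lowercase, matching the attribute set in __init__
        return out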
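You can verify the fix by printing the parameter list; after the rename it should show a 1x1 weight and a 1-element bias instead of being empty:

model = LinearRegression()
print(list(model.parameters()))  # should show two tensors: the weight and the bias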

Really?? :slightly_frowning_face:

I'm new to Python. Thanks a lot!
I feel so stupid :grinning:

No worries! We all had to start at some point! :wink:
