Rafael_R
(jean)
#1
Hi,
I am creating a simple network:
class RandomNet(nn.Module):
    """Toy network wrapping a single ``nn.Linear(1, 2)`` layer.

    Parameters
    ----------
    vocab_size : int
        Stored for reference only; it does not affect any layer shape.
    """

    def __init__(self, vocab_size):
        super().__init__()
        self.vocab_size = vocab_size
        self.linear = nn.Linear(1, 2)

    def save_checkpoint(self, path):
        """Serialize the model's state dict to *path*."""
        # FIX: the original called `th.save`, but no `th` alias is imported
        # anywhere in this post — only `torch` is. Use the real name.
        torch.save(self.state_dict(), path)

    def load_checkpoint(self, path, cpu=False):
        """Load a state dict from *path*; force CPU tensors when cpu=True."""
        if cpu:
            # map_location remaps tensors saved on GPU onto CPU storage.
            self.load_state_dict(
                torch.load(path, map_location=lambda storage, loc: storage))
        else:
            self.load_state_dict(torch.load(path))

    def forward(self, y):
        # Split the two output features into a two-element list.
        h = self.linear(y)
        return [h[0], h[1]]
When I wrap this model inside a wrapper model and just print the parameters:
self.net = RandomNet(src_vocab_size)
print(model.parameters())
AttributeError: 'RandomModel' object has no attribute 'parameters'
But I do have the linear parameter. How could I fix the error?
Am I making a mistake in using the linear layer?
I am not sure where you used self.net
but I created a working example for you:
import torch
import torch.nn as nn
class RandomModel(nn.Module):
    """Wrapper module whose sub-modules are attached after construction."""

    def __init__(self):
        super().__init__()
        # Placeholders for sub-modules assigned later. Because this class
        # inherits from nn.Module, assigning an nn.Module to either attribute
        # registers it, so its parameters appear in self.parameters().
        self.net = None
        self.linear = None

    def forward(self, x):
        # Identity pass-through; this wrapper only demonstrates registration.
        return x
class RandomNet(nn.Module):
    """Toy network wrapping a single ``nn.Linear(1, 2)`` layer.

    Parameters
    ----------
    vocab_size : int
        Stored for reference only; it does not affect any layer shape.
    """

    def __init__(self, vocab_size):
        super().__init__()
        self.vocab_size = vocab_size
        self.linear = nn.Linear(1, 2)

    def save_checkpoint(self, path):
        """Serialize the model's state dict to *path*."""
        # FIX: the snippet only does `import torch` (no `th` alias), so the
        # original `th.save` would raise NameError. Use `torch.save`.
        torch.save(self.state_dict(), path)

    def load_checkpoint(self, path, cpu=False):
        """Load a state dict from *path*; force CPU tensors when cpu=True."""
        if cpu:
            # map_location remaps tensors saved on GPU onto CPU storage.
            self.load_state_dict(
                torch.load(path, map_location=lambda storage, loc: storage))
        else:
            self.load_state_dict(torch.load(path))

    def forward(self, y):
        # Split the two output features into a two-element list.
        h = self.linear(y)
        return [h[0], h[1]]
src_vocab_size = 20

# A standalone RandomNet exposes its Linear parameters directly.
standalone = RandomNet(src_vocab_size)
print(list(standalone.parameters()))

# Attaching a RandomNet to an nn.Module wrapper registers it, so the
# wrapper's parameters() yields the inner Linear weights and bias too.
wrapper = RandomModel()
wrapper.net = RandomNet(src_vocab_size)
print(list(wrapper.parameters()))
The output for the RandomModel will be:
[Parameter containing:
tensor([[0.1979],
[0.7473]], requires_grad=True), Parameter containing:
tensor([-0.4504, 0.0435], requires_grad=True)]
1 Like
Rafael_R
(jean)
#3
Thanks! Yes — so the model.net parameters have to be passed to the optimizer. Silly mistake: my wrapper model inherited from object instead of nn.Module.
1 Like