If you only want to load a single layer, you can copy its weight and bias tensors directly out of the saved state dict:
class MyModel(nn.Module):
    """Toy two-layer MLP used to demonstrate loading a single layer's weights.

    Architecture: Linear(10 -> 10) -> ReLU -> Linear(10 -> 20).
    """

    def __init__(self):
        super().__init__()
        self.fc1 = nn.Linear(10, 10)
        # BUG FIX: fc1 emits 10 features, so fc2 must accept 10 inputs.
        # The original nn.Linear(20, 20) made forward() fail with a
        # shape-mismatch error on every input.
        self.fc2 = nn.Linear(10, 20)
        self.act = nn.ReLU()

    def forward(self, x):
        """Apply fc1 -> ReLU -> fc2 and return the (batch, 20) result."""
        x = self.act(self.fc1(x))
        x = self.fc2(x)
        return x
# Create a model and save its full state dict to disk.
model = MyModel()
torch.save(model.state_dict(), 'tmp.pth')

# weights_only=True restricts unpickling to tensors and primitive containers,
# which is the safe way to load checkpoint files (plain torch.load runs
# arbitrary pickled code).
state_dict = torch.load('tmp.pth', weights_only=True)

# Fresh model: copy only fc1's parameters from the checkpoint, leaving fc2
# at its random initialization.  no_grad() keeps autograd from recording
# the in-place copy_() operations.
model = MyModel()
with torch.no_grad():
    model.fc1.weight.copy_(state_dict['fc1.weight'])
    model.fc1.bias.copy_(state_dict['fc1.bias'])