Replace torch.optim.Adam with torch.optim.LBFGS

Please, how can I replace torch.optim.Adam with torch.optim.LBFGS in this code?

model = Net().to(device)
crit = nn.BCELoss(reduction='mean')  # reduce=True is deprecated; reduction='mean' is the equivalent
optimizer = torch.optim.Adam(model.parameters(), lr=1e-3)

def train():
    model.train()
    loss_all = 0
    for data in train_loader:
        data = data.to(device)
        optimizer.zero_grad()
        output = model(data)
        label = data.y
        loss = crit(output, label)
        loss.backward()
        loss_all += data.num_graphs * loss.item()
        optimizer.step()
    return loss_all / len(train_dataset)

for epoch in tqdm(range(100), desc="Epochs: ", leave=True):
    loss = train()
    print('Epoch: {:03d}, Loss: {:.5f}'.format(epoch, loss))

Thank you!


You would need to move the forward pass, the loss calculation, and the backward pass into a closure, as seen in this example:

import torch
from torch.optim import LBFGS

x = torch.tensor([100.], device='cuda', requires_grad=True)
optimizer = LBFGS([x])
def f(x):
    return (x - 1) * (x - 1)

n_step = 2

for _ in range(n_step):
    def closure():
        optimizer.zero_grad()
        output = f(x)
        output.backward()
        return output
    optimizer.step(closure)
    print(x)
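
Applied to the training loop from your question, this could look roughly like the sketch below. It reuses Net, device, train_loader, train_dataset, and data.num_graphs from your original code, and the learning rate is just a placeholder you would need to tune. LBFGS may evaluate the closure several times per optimizer.step(), which is why zero_grad, forward, and backward all have to live inside it:

import torch
import torch.nn as nn

model = Net().to(device)
crit = nn.BCELoss(reduction='mean')
optimizer = torch.optim.LBFGS(model.parameters(), lr=1.)  # lr is a placeholder; tune for your setup

def train():
    model.train()
    loss_all = 0
    for data in train_loader:
        data = data.to(device)

        def closure():
            # LBFGS may call this closure multiple times per step,
            # so the full forward/backward pass belongs here.
            optimizer.zero_grad()
            output = model(data)
            loss = crit(output, data.y)
            loss.backward()
            return loss

        # step(closure) runs the closure and returns the loss
        # from its first evaluation
        loss = optimizer.step(closure)
        loss_all += data.num_graphs * loss.item()
    return loss_all / len(train_dataset)

With this in place, the epoch loop from your question can stay unchanged.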