How can I recreate the following TensorFlow code in PyTorch?

global_step = tf.train.get_or_create_global_step()
dec_learning_rate = tf.compat.v1.train.exponential_decay(flags.learning_rate, global_step, 50000, 0.5, staircase=True)
optim = tf.train.AdamOptimizer(learning_rate=dec_learning_rate)

gvs = optim.compute_gradients(loss)
gvs = [(tf.clip_by_norm(grad, 1), val) for grad, val in gvs]
optim = optim.apply_gradients(gvs)
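
Below is a close PyTorch equivalent: per-parameter gradient hooks handle the norm clipping, Adam replaces AdamOptimizer, and a learning-rate scheduler replaces exponential_decay. To make the snippet self-contained, a toy linear regression stands in for your model and data; the Linear(8, 1) architecture and the random X/y tensors are placeholders, not part of the original question.

import torch

# Placeholder model and data so the example runs end to end (assumed here)
model = torch.nn.Linear(8, 1)
X = torch.randn(64, 8)   # dummy inputs
y = torch.randn(64, 1)   # dummy targets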
def my_hook(grad):
    # tf.clip_by_norm rescales a tensor so its L2 norm is at most the given
    # maximum; torch.clamp clips element values (and would zero out negative
    # gradients here), so it is not equivalent
    norm = grad.norm()
    if norm > 1:
        grad = grad * (1.0 / norm)
    return grad

# The hook runs during backward(), and its return value replaces the gradient
for cur_param in model.parameters():
    cur_param.register_hook(my_hook)
    
criterion = torch.nn.MSELoss()
optimizer = torch.optim.Adam(model.parameters(), lr=0.001)
# exponential_decay(lr, global_step, 50000, 0.5, staircase=True) halves the
# learning rate every 50,000 optimizer steps; StepLR reproduces that staircase
# schedule, while ExponentialLR would decay a little on every step instead
my_lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=50000, gamma=0.5)
    
for cur_epoch in range(10):
    optimizer.zero_grad()
    output = model(X)
    final_loss = criterion(output, y)
    final_loss.backward()   # the hooks clip each parameter's gradient here
    optimizer.step()
    my_lr_scheduler.step()  # once per optimizer step, mirroring TF's global_step
    print("Epoch {0} Loss is {1}".format(cur_epoch, final_loss.item()))