Parameters updated only for one epoch

I use PyTorch 0.3.1 and my model is an LSTM with an embedding layer before it and a fully connected (FC) layer on top. When I print the model's parameters, I see them change once; after that, only the last layer's parameters keep changing while all the other layers' parameters stay the same. Any idea why?
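
For reference, this is the kind of check I mean (a minimal sketch, not my exact code): print a summary of every named parameter after each epoch and watch which values move.

# Sketch of the parameter check: print a norm per named parameter
# after each epoch. In my runs, only the FC layer's values keep moving.
for name, p in model.named_parameters():
    print(name, p.data.norm())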

import torch
import torch.nn as nn
import torch.optim as optim
from torch.autograd import Variable
from torch.utils.data import DataLoader

# LSTMmodel, collate_batch, args, and the train/test datasets are defined elsewhere
model = LSTMmodel(train.vocab_num, args)
if USE_CUDA:
    model.cuda()

train_loader = DataLoader(train, batch_size=args.batch_size, shuffle=True,
                          num_workers=1, collate_fn=collate_batch, drop_last=True)
test_loader = DataLoader(test, batch_size=args.batch_size, shuffle=False,
                         num_workers=1, collate_fn=collate_batch)

optimizer = optim.Adam(model.parameters(), lr=args.prate)
loss_function = nn.CrossEntropyLoss()

for epoch in range(args.epochs):
    for i, traindata in enumerate(train_loader):
        train_inputs, train_labels, train_seq_length = traindata
        train_labels = torch.squeeze(train_labels)

        if USE_CUDA:
            train_inputs = Variable(train_inputs.cuda())
            train_labels = Variable(train_labels.cuda())
        else:
            train_inputs = Variable(train_inputs)
            train_labels = Variable(train_labels)

        model.zero_grad()
        model.batch_size = len(train_labels)
        # Re-initialize the hidden state for every batch
        model.hidden = model.init_hidden()

        output = model(train_inputs, train_seq_length)
        loss = loss_function(output, train_labels)
        loss.backward()
        optimizer.step()
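
One way to narrow this down might be to check whether gradients even reach the lower layers after loss.backward(): if they are None or near zero for the embedding/LSTM weights while the FC gradients are not, the graph is presumably being cut somewhere inside the model. A minimal sketch (assuming PyTorch 0.3.1, where p.grad is a Variable, hence the .data):

# Debugging sketch: run right after loss.backward() inside the loop.
# If the embedding/LSTM grads are None or ~0 while the FC grads are not,
# the computation graph is likely detached before those layers.
for name, p in model.named_parameters():
    if p.grad is None:
        print(name, 'has no gradient')
    else:
        print(name, 'grad norm =', p.grad.data.norm())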