My own custom hyperparameter search

I implemented my own custom hyperparameter search for an LSTM. Based on the code attached below, is there a better way I could initialize the model and optimizer for each set of parameters without retraining every time? :grinning:

import json

import torch

def lstm_random_parameter_search(parameters):
    train_data, test_data = trainloader, valloader
    epochs            = 100
    # number of input nodes
    n_inputs          = image_dimension**2
    # number of output nodes
    n_outputs         = image_dimension**2
    n_layers          = 1
    criterion         = weighted_binary_cross_entropy 
    lag               = 5
    learning_rate     = parameters['learning_rate']
    hidden_size       = parameters['hidden_size']
    bidirectional     = parameters['bidirectional']
    max_iou           = 0
    trials            = 0
    # initialized up front so the final print/return works even if no
    # configuration ever improves on max_iou
    best_parameters   = dict()
    # model = torch.load('final_results/rnn_best_model_100.pt').to(device)
    
    for lr_id, lr in enumerate(learning_rate):
        for hs_id, hs in enumerate(hidden_size):
            for bi_id, bi in enumerate(bidirectional):
                save_grid_parameters(parameters, lr_id, hs_id, bi_id)
                # build a fresh model for this configuration (bi would be
                # passed here too if ImageRNN accepts a bidirectional flag)
                model     = ImageRNN(n_inputs, n_outputs, n_hidden=hs).to(device)
                # initialize the optimizer over the new model's parameters
                optimizer = torch.optim.SGD(model.parameters(), lr=lr,
                                            momentum=0.9, weight_decay=0.001)
                for epoch in range(1, epochs + 1):
                    for batch_idx, (inputs, labels, names) in enumerate(train_data):
                        trials += 1
                        # load data and move data to GPU's
                        inputs = inputs.to(device)
                        labels = labels.to(device)
                        
                        # shift the time series images 
                        inputs, labels = lag_data(inputs.float(), labels, lag=lag)
            
                        # initialize the hidden state on the GPU's
                        model.init_hidden(inputs, device)
                        # forward propagation
                        outputs = model(inputs)

                        loss = criterion(labels.view(-1, image_dimension * image_dimension),
                                         outputs.view(-1, image_dimension * image_dimension))

                        # backpropagate, update the weights, then clear the
                        # gradients for the next batch
                        loss.backward()
                        optimizer.step()
                        optimizer.zero_grad()

                        # evaluate on the validation data
                        iou, f1, psnr, val_loss = get_accuracy(model, test_data)
                        
                        if iou > max_iou:
                            best_parameters = dict()
                            print('optimal parameters found after {} trials'.format(trials))
                            best_parameters['learning_rate'] = lr
                            best_parameters['hidden_size']   = hs
                            best_parameters['iou']           = iou
                            best_parameters['f1_score']      = f1
                            best_parameters['psnr']          = psnr
                            best_parameters['loss']          = loss.detach().item()
                            best_parameters['val_loss']      = val_loss
                            # save the best model and its metrics to disk
                            torch.save(model, 'final_results/rnn_best_model_100.pt')
                            with open('final_results/rnn_best_parameters_100.json', 'w') as f:
                                json.dump(best_parameters, f)

                            max_iou = iou

                        # clear variables from memory before the next batch
                        del inputs, labels, outputs
                        torch.cuda.empty_cache()

                    msg = 'Epoch: {}, Training Loss: {:.3f}, Validation Loss: {:.3f}, IoU: {:.3f}, f1: {:.3f}, PSNR: {:.6f}'.format(
                        epoch, loss.detach().item(), val_loss, iou, f1, psnr)
                    print(msg)
    
    print(best_parameters)
    # note: this returns the last model trained; the best model found during
    # the search is the one saved to disk above
    return best_parameters, model
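
To make the question concrete, here is the kind of refactor I had in mind: a small factory that builds a fresh, independently initialized model and optimizer for every configuration, with the configurations drawn either from itertools.product (a grid, as in my code above) or from random.choice (an actual random search, which the function name suggests). This is only a sketch: build_model_and_optimizer and n_samples are names I made up, and it assumes the same ImageRNN, n_inputs, n_outputs, device, and parameters dict as above (bidirectional could be added to the product in the same way).

import itertools
import random

def build_model_and_optimizer(lr, hs):
    # a brand-new model and optimizer for every configuration, so no
    # weights or optimizer state leak from one trial into the next
    model = ImageRNN(n_inputs, n_outputs, n_hidden=hs).to(device)
    optimizer = torch.optim.SGD(model.parameters(), lr=lr,
                                momentum=0.9, weight_decay=0.001)
    return model, optimizer

# grid search: train one fresh model per (lr, hs) combination
for lr, hs in itertools.product(parameters['learning_rate'],
                                parameters['hidden_size']):
    model, optimizer = build_model_and_optimizer(lr, hs)
    # ... run the epoch/batch loop from above ...

# random search: sample n_samples configurations independently
n_samples = 10
for _ in range(n_samples):
    lr = random.choice(parameters['learning_rate'])
    hs = random.choice(parameters['hidden_size'])
    model, optimizer = build_model_and_optimizer(lr, hs)
    # ... run the epoch/batch loop from above ...

As far as I understand, each configuration has to be trained from scratch for the comparison to be fair, so the factory only tidies up the bookkeeping. Is that right, or is there a way to avoid paying the full retraining cost for every configuration?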