Many-to-one LSTM network: problem with sequence length

I have time series data of shape (num_of_samples, sample_length), with

num_of_samples = 2000
sample_length = 600
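
To make the shapes concrete, dummy stand-ins with the same shapes as my real data would look like this (the label tensor is an assumption for illustration, one class index per sample):

    import torch

    # dummy stand-ins with the same shapes as my real data
    source_data = torch.randn(2000, 600)            # 2000 samples, each a series of 600 points
    source_labels = torch.randint(0, 10, (2000,))   # assumed: one class index per sample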

I have implemented my LSTM model:

import torch
import torch.nn as nn

class Recurrent_Layer(nn.Module):
    def __init__(self, num_classes, hidden_size):
        super(Recurrent_Layer, self).__init__()
        # input_size == hidden_size here, i.e. each time step is a vector of length hidden_size
        self.lstm = nn.LSTM(hidden_size, hidden_size)
        self.fc = nn.Linear(hidden_size, num_classes)

    def forward(self, x, h_init, c_init):
        # x is expected as (seq_len, batch, input_size)
        out, (h_final, c_final) = self.lstm(x, (h_init, c_init))
        # many-to-one: classify from the output of the last time step only
        score_seq = self.fc(out[-1])
        return score_seq, h_final, c_final

My problem is that I need to know how to organise the dimensions of the input so that I feed 20 samples at a time, each of length 600. In other words, I need to unroll the LSTM over 20 samples, each containing 600 points.
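
As far as I understand, nn.LSTM (with the default batch_first=False) expects input of shape (seq_len, batch, input_size). So I am guessing a slice of 20 samples would need to be reshaped something like this, treating each sample of 600 points as one time step, which would also mean hidden_size has to be 600 in my model since I construct nn.LSTM(hidden_size, hidden_size). I am not sure this is right:

    chunk = source_data[i:i + seq_length]     # shape: (20, 600)
    chunk = chunk.view(seq_length, 1, 600)    # shape: (20, 1, 600) -> (seq_len, batch, input_size)?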

Training:

for i in range(0, 2000 - seq_length, seq_length):

    # fresh hidden and cell states for each chunk
    h = torch.zeros(1, 1, hidden_size).to(device)
    c = torch.zeros(1, 1, hidden_size).to(device)

    lstm_optimizer.zero_grad()

    # take the next seq_length (= 20) samples and their labels
    input_sequence = source_data[i:i + seq_length]
    input_labels = source_labels[i:i + seq_length]

    input_sequence = input_sequence.to(device)
    input_labels = input_labels.to(device)

    # cut the graph from the previous chunk, but keep the states trainable
    h = h.detach().requires_grad_()
    c = c.detach().requires_grad_()

    # this is the line that raises the error below
    scores_char, h, c = lstm_net(input_sequence.view(seq_length, 1), h, c)

The error:

    RuntimeError: invalid argument 2: size ‘[20 x 1]’ is invalid for input with 12000 elements at …\aten\src\TH\THStorage.cpp:84

I suppose the problem is that the slice contains 20 × 600 = 12000 elements, which cannot be viewed as [20 x 1]. I need to understand how I should arrange my data to achieve this task. How should the input be reshaped?
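
For what it's worth, here is a small standalone shape check that I think should work, assuming input_size = hidden_size = 600, a batch of 1, and a made-up num_classes of 10. Is something like this the right way to go?

    import torch

    hidden_size, num_classes, seq_length = 600, 10, 20
    net = Recurrent_Layer(num_classes, hidden_size)

    # dummy chunk of 20 samples, each with 600 points, reshaped to (seq_len, batch, input_size)
    dummy = torch.randn(seq_length, 600).view(seq_length, 1, 600)

    h0 = torch.zeros(1, 1, hidden_size)
    c0 = torch.zeros(1, 1, hidden_size)

    scores, h, c = net(dummy, h0, c0)
    print(scores.shape)   # I expect torch.Size([1, 10]) -- one prediction from the last time step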