LSTM variable batch size

How can I make the LSTM handle a variable batch size? My dataset has 1515 samples, but I want to use a batch size of 10 — since 1515 is not a multiple of 10, the final batch contains only 5 samples, which breaks the fixed-size hidden state below.

class LSTM(nn.Module):
    """Single-layer LSTM followed by a linear head producing 2 outputs.

    Fix for variable batch sizes: the initial hidden/cell state is built
    inside ``forward`` from the *actual* batch size of the input, instead
    of being created once in ``__init__`` with a hard-coded ``batch_size``.
    This lets the last, smaller batch (e.g. 5 samples out of 1515 with a
    batch size of 10) pass through without any padding of the dataset.
    """

    def __init__(self, feature_dim, hidden_dim, batch_size=None):
        # ``batch_size`` is kept for backward compatibility with existing
        # callers but is no longer needed — the state is sized per batch.
        super(LSTM, self).__init__()
        self.hidden_dim = hidden_dim

        # single layer lstm; batch_first=True -> input is (batch, seq, feature)
        self.lstm = nn.LSTM(feature_dim, hidden_size=hidden_dim,
                            num_layers=1, batch_first=True)

        # fc layers
        self.fc1 = nn.Linear(hidden_dim, 2)

    def forward(self, x):
        # x: (batch, seq_len, feature_dim) because batch_first=True.
        batch = x.size(0)
        # Fresh zero-initialized state sized to THIS batch (the PyTorch
        # default); shape is (num_layers, batch, hidden_dim).
        # NOTE: the original used torch.randn — random initial state is
        # unusual; zeros is the conventional (and LSTM's own default) choice.
        h0 = torch.zeros(1, batch, self.hidden_dim, device=x.device, dtype=x.dtype)
        c0 = torch.zeros(1, batch, self.hidden_dim, device=x.device, dtype=x.dtype)

        out, _ = self.lstm(x, (h0, c0))
        # Classify from the hidden state of the last time step.
        return self.fc1(out[:, -1, :])

Is padding the dataset with extra samples, so its size becomes a multiple of 10, really the only way? (Answer: no — create the initial hidden state per batch, as above, or simply omit it and let the LSTM default to zeros; `DataLoader` with `drop_last=False` then handles the smaller final batch naturally.)