Getting an error while iterating over the dataloader

This is my custom dataloader which will take input features of shape (feature_length, 39)

import torch
torch.cuda.empty_cache()
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import torch.utils.data as Data
import scipy.io as sio
import numpy as np
import os
import shutil
PYTORCH_NO_CUDA_MEMORY_CACHING=1

#%% Dataloader
class Dataset(Data.Dataset):
    def __init__(self, feat=None, chf=None, span=None, context=None, shift=None):
        super(Dataset, self).__init__()
        self.feat = feat
        self.chf = chf
        self.span=span
        self.context=context
        self.shift=shift
        
        
    def __len__(self):
               
        leng=len(self.feat)
        
        return leng
    
    def label_creation(self, len, chf, span):

        #len=torch.max(len,span)

        lab=torch.zeros(len)

        if np.bitwise_not(np.any(chf.shape)):
            chf=np.array([chf])

        pri=chf-span
        post=chf+span
        pri=np.clip(pri,0,len)
        post=np.clip(post,0,len)
        op1=[]
        #print(pri.shape)
        for (x,y) in zip(pri,post):
            op1.append(torch.arange(x,y))
        op=torch.cat(op1)
        lab[op.to(dtype=torch.long)]=1
        return lab
    
    def padding_sample(self, x,label,context):
        #Reshape tensor if context < = feat length
        rep_samp=context-x.shape[0]
        temp=torch.tile(x[-1,:],(rep_samp,1))
        x_up=torch.cat((x,temp))
        temp_lab1=torch.zeros(rep_samp)
        label_up=torch.cat((label,temp_lab1))
        return x_up,label_up
    

    def reshape_tensor(self, x,y,context,shift):

     if x.shape[0]> context:
        id=int((x.shape[0]-context)/shift)+1
        zz=[]
        yy=[]
        for i in range(id):
            zz.append(torch.unsqueeze(x[i*shift:i*shift+context,:],0))
            yy.append(torch.unsqueeze(y[i*shift:i*shift+context],0))
        op=torch.cat(zz)
        lb=torch.cat(yy)
     else:
        #Reshape tensor if context < = feat length
       op,lb=self.padding_sample(x,y,context)
       op=op.unsqueeze(dim=0)
       lb=lb.unsqueeze(dim=0) 

     return op,lb
        
    def __getitem__(self, index):
        feat1=torch.from_numpy(self.feat[index]).to(dtype=torch.float) # needs to covert to float to tackle in model
        #feat1=torch.from_numpy(self.feat[index])
        label=self.label_creation(feat1.shape[0],self.chf[index],self.span)
        label=label.to(dtype=torch.long)
        #label=label
        #chf=self.chf[index]


        feat,label=self.reshape_tensor(feat1,label,self.context,self.shift)

        return feat, label

#%%
def collate_fn(batch_data):
    batch_features = torch.cat([feat[0] for feat in batch_data], dim=0)
    batch_labs = torch.cat([feat[1] for feat in batch_data], dim=0)
    batch_lengths = [feat[0].shape[0] for feat in batch_data]
    return batch_features, batch_labs, batch_lengths

Then i am defining the dataset and the dataloader

train_dst = Dataset(train_feat, train_chf,
                    span=20, context=500, shift=250)
train_loader = Data.DataLoader(
    train_dst, batch_size=32, pin_memory=True, num_workers=5, drop_last=True, collate_fn=collate_fn, shuffle=True)   

But when i am iterating over the dataloader i am getting the following error

* (Number end, *, Tensor out, torch.dtype dtype, torch.layout layout, torch.device device, bool pin_memory, bool requires_grad)
 * (Number start, Number end, *, torch.dtype dtype, torch.layout layout, torch.device device, bool pin_memory, bool requires_grad)
 * (Number start, Number end, Number step, *, Tensor out, torch.dtype dtype, torch.layout layout, torch.device device, bool pin_memory, bool requires_grad)

I dont understand what the problem, is it the GPU or the dtype of the input features.

Any kind of help will be greatly appreciated

The error is raised in:

for (x,y) in zip(pri,post):
    op1.append(torch.arange(x,y))

in the torch.arange call, which seems to get unexpected inputs.
Check x and y and make sure these are valid values