Importing my functions on GPUs

Hi, I am running my code on a GPU (NVIDIA GeForce RTX 2080 Ti), but it runs faster on my CPU (16 GB RAM). My neural network class and several functions used in my code live in different modules on my system. Is that the reason, or do I have to move the functions to the GPU as well?

I’m not sure I understand the question completely, but PyTorch will not move data to the GPU automatically.
You would have to move the model as well as the input data to the device via:

device = "cuda"
model.to(device)
data = data.to(device)

before executing the forward pass.
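
For example, a minimal self-contained sketch (the small nn.Linear model and the random input are just placeholders for your own model and data):

import torch
import torch.nn as nn

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

model = nn.Linear(10, 2).to(device)    # move the model parameters to the device
data = torch.randn(32, 10).to(device)  # move the input batch to the same device

output = model(data)                   # the forward pass now runs on the GPU (if available)
print(output.is_cuda)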

import time
import torch

Start = time.time()
dev = torch.device("cuda:1" if torch.cuda.is_available() else "cpu")
J1 = -0.43
m = 5
n = 4
I_Val = pf.linear_index(m, n)      # function present in a module
I_Val_2 = pf.linear_index_2(m)     # function present in a module
P_SET = []
M_SET = []
Q_SET = []
loss_f = []
for j in range(20):
    Start1 = time.time()
    I_Par = rand_mat(m, n, I_Val).to(dev)  # converting the (m, m, m, m, n) data structure into the input for the network
    End1 = time.time()
    print(I_Par.is_cuda, 'Time in uploading vector', End1 - Start1)
    Inter = []
    P_list = []
    for i in range(len(I_Par[0])):
        Start2 = time.time()
        Net = pf.Neural_Made(n, m).to(dev)   # a new network is built on every iteration
        Net = Net(I_Par).tolist()            # .tolist() copies the result back to the CPU as a Python list
        P_list.append(Net[i])
        End2 = time.time()
        print('Time in network', End2 - Start2)
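
A note on the timings above: CUDA kernels are launched asynchronously, so time.time() can stop the clock before the device has finished its work; torch.cuda.synchronize() gives a more accurate measurement. A minimal, self-contained sketch (the small nn.Linear model and random input are placeholders, not the code above):

import time
import torch
import torch.nn as nn

dev = torch.device("cuda:1" if torch.cuda.is_available() else "cpu")
model = nn.Linear(512, 512).to(dev)        # placeholder model, just to have some GPU work
x = torch.randn(64, 512, device=dev)       # placeholder input created directly on the device

if dev.type == "cuda":
    torch.cuda.synchronize(dev)            # finish any pending GPU work before starting the clock
start = time.time()
out = model(x)                             # forward pass (asynchronous on the GPU)
if dev.type == "cuda":
    torch.cuda.synchronize(dev)            # wait for the kernel to finish before stopping the clock
end = time.time()
print('Time in network', end - start)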

and this is my neural network:

import torch
import torch.nn as nn

class Neural_Made(nn.Module):
    def __init__(self, n, m):
        super(Neural_Made, self).__init__()
        self.n = n
        self.m = m
        self.in_size = n * m * m * m * m
        # lower-triangular masks for the autoregressive connectivity
        self.register_buffer('Mask1', torch.ones([self.in_size] * 2))
        self.register_buffer('Mask2', torch.eye(self.in_size))
        self.Mask1 = torch.tril(self.Mask1)      # lower triangle including the diagonal
        self.Mask2 = self.Mask1 - self.Mask2     # strictly lower triangle

        self.fc1 = nn.Linear(self.in_size, self.in_size)
        nn.init.xavier_uniform_(self.fc1.weight)
        self.fc1.weight.data = self.fc1.weight.data * self.Mask2

        self.fc2 = nn.Linear(self.in_size, self.in_size)
        nn.init.xavier_uniform_(self.fc2.weight)
        self.fc2.weight.data = self.fc2.weight.data * self.Mask1

        self.out = nn.Linear(self.in_size, self.in_size)
        nn.init.xavier_uniform_(self.out.weight)
        self.out.weight.data = self.out.weight.data * self.Mask1

        self.prelu = nn.PReLU()
        self.sig = nn.Sigmoid()

    def forward(self, x):
        x = self.fc1(x)
        x = self.prelu(x)
        x = self.fc2(x)
        x = self.prelu(x)
        x = self.out(x)
        x = self.sig(x)
        return x.view(-1)
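
For reference, a small usage sketch of the class above (m = 5 and n = 4 are taken from the snippet further up; the input here is just a random tensor of length in_size created directly on the device, not the actual rand_mat output):

import torch

dev = torch.device("cuda:1" if torch.cuda.is_available() else "cpu")
m, n = 5, 4

net = Neural_Made(n, m).to(dev)                # build the masked network once and move it to the device
x = torch.rand(n * m * m * m * m, device=dev)  # random input of length in_size, only for illustration

with torch.no_grad():                          # plain forward pass, no gradients needed here
    p = net(x)                                 # output stays on the device as a tensor

print(p.is_cuda, p.shape)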