Cosine embedding loss

I am training a Siamese-style model with cosine embedding loss (nn.CosineEmbeddingLoss). This is the model:

import torch
import torch.nn as nn

class CNN(nn.Module):
    def __init__(self, input_size, hidden_size, num_layers, num_classes):
        super(CNN, self).__init__()

        self.num_layers = num_layers    # currently unused
        self.hidden_size = hidden_size

        # 1D convolutional layer: input_size channels in, hidden_size channels out
        self.cnn1 = nn.Conv1d(input_size, hidden_size, kernel_size=5, stride=1, padding=1)
        self.dropout = nn.Dropout(0.3)  # defined but not applied in forward
        # Max pooling layer
        self.maxpool = nn.MaxPool1d(kernel_size=2, stride=1)
        # Fully connected layer (864 = hidden_size * pooled sequence length)
        self.fc1 = nn.Linear(864, 128)

        self.act1 = nn.ReLU()
        self.cnn = nn.Sequential(self.cnn1, self.act1, self.maxpool)

    def forward(self, inp1, inp2):
        # (batch, seq_len, features) -> (batch, features, seq_len) for Conv1d
        out1 = self.cnn(inp1.transpose(1, 2))
        out2 = self.cnn(inp2.transpose(1, 2))

        # Transpose back to (batch, seq_len, channels), then flatten per sample
        out1 = out1.transpose(1, 2).reshape(out1.size(0), -1)
        out2 = out2.transpose(1, 2).reshape(out2.size(0), -1)

        # Shared fully connected layer produces both embeddings
        emb1 = self.fc1(out1)
        emb2 = self.fc1(out2)

        return emb1, emb2
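
For context, this is roughly how I apply the loss to the two embeddings (a minimal sketch; the all-ones target below is an assumption for illustration, my real targets are +1/-1 labels from my dataset):

criterion = nn.CosineEmbeddingLoss()
emb1, emb2 = model(inp1, inp2)
# target is +1 for similar pairs and -1 for dissimilar pairs
target = torch.ones(emb1.size(0))
loss = criterion(emb1, emb2, target)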

I am getting NaN values in the output of the CNN. Training worked at first, but after some iterations the outputs turn into NaN. Please help me solve this.
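
This is how I check where the NaN first shows up (a minimal repro sketch; the concrete shapes are assumptions chosen so that hidden_size * pooled_length = 32 * 27 = 864, matching fc1):

torch.autograd.set_detect_anomaly(True)  # reports the op that first produces NaN during backward

model = CNN(input_size=8, hidden_size=32, num_layers=1, num_classes=2)
inp1 = torch.randn(4, 30, 8)  # (batch, seq_len, features)
inp2 = torch.randn(4, 30, 8)
emb1, emb2 = model(inp1, inp2)
print(torch.isnan(emb1).any(), torch.isnan(emb2).any())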