Deploying a custom model to ONNX

I have the following model defined in PyTorch. I want to export it to ONNX (and eventually run it through ONNX.js) so that I can deploy it.
However, because of the way I have defined it, I'm not sure whether it can be exported directly without any issues. Can anyone give me their thoughts on the matter? A rough sketch of the export call I was planning to try is included after the class definition.

import torch
import torch.nn as nn
import torch.nn.functional as F

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")


class AttentionModelMIL(nn.Module):
    def __init__(self):
        super(AttentionModelMIL, self).__init__()
        self.L = 1024
        self.D = 128
        self.K = 1

        self.attention = nn.Sequential(
            nn.Linear(self.L, self.D),
            nn.Tanh(),
            nn.Linear(self.D, self.K)
        )

        self.classifier = nn.Sequential(
            nn.Linear(self.L*self.K, 1),
            nn.Sigmoid()
        )

    def forward(self, x):
        A = self.attention(x)  # NxK
        A = A.to(device)
        A = torch.transpose(A, 1, 0)  # KxN
        A = F.softmax(A, dim=1)  # softmax over N
        M = torch.mm(A, x)  # KxL
        M = M.to(device)
        Y_prob = self.classifier(M)
        Y_prob = Y_prob.to(device)
        Y_hat = torch.ge(Y_prob, 0.5).float()

        return Y_prob, Y_hat, A
        
    def calculate_classification_error(self, X, Y):
        Y = Y.float()
        Y = Y.to(device)
        Y_prob, Y_hat, _ = self.forward(X)
        Y_prob, Y_hat = Y_prob.to(device), Y_hat.to(device)
        error = 1. - Y_hat.eq(Y).cpu().float().mean().item()

        return error, Y_hat, Y_prob

    def calculate_objective(self, X, Y):
        Y = Y.to(device)
        Y = Y.float()
        Y_prob, _, A = self.forward(X)
        Y_prob, A = Y_prob.to(device), A.to(device)
        Y_prob = torch.clamp(Y_prob, min=1e-5, max=1. - 1e-5)
        neg_log_likelihood = -1. * (Y * torch.log(Y_prob) + (1. - Y) * torch.log(1. - Y_prob))  # negative log Bernoulli
        neg_log_likelihood = neg_log_likelihood.to(device)

        return neg_log_likelihood, A
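
For reference, this is roughly how I was planning to call the exporter. It is only a sketch building on the class above: the bag of 10 dummy instances, the file name, the input/output names, the dynamic bag axis and opset 11 are all placeholders I picked, not requirements of the model.

# Export sketch (assumes the imports and class definition above).
model = AttentionModelMIL().to(device)
model.eval()

# Dummy bag of N = 10 instance feature vectors of size L = 1024 (placeholder shape).
dummy_input = torch.randn(10, 1024, device=device)

torch.onnx.export(
    model,
    dummy_input,
    "attention_mil.onnx",                        # placeholder file name
    input_names=["bag"],
    output_names=["Y_prob", "Y_hat", "A"],       # matches the three values forward() returns
    dynamic_axes={"bag": {0: "num_instances"}},  # bag size N varies per sample
    opset_version=11,
)

After exporting I would sanity-check the file with onnx.checker.check_model before trying to load it in ONNX.js.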