How to convert a custom-dataset-trained Faster R-CNN object detection model to TFLite?

I would like to build an app around a pre-trained Faster R-CNN object detection model that I fine-tuned on a custom dataset. I want to build the app in Flutter, but Flutter does not support PyTorch object detection models, only TensorFlow Lite models. Is it possible to convert this model to TFLite and build the app that way? Thanks for any suggestions. This is my code; I saved the model to a file and it works fine on video with cv2.

import os
import torchvision
import torch
from PIL import Image, ImageDraw
import numpy as np
import torchvision.transforms as T
import matplotlib.pyplot as plt
from torch.utils.data import Dataset
import xml.etree.ElementTree as ET
from torchvision import transforms

from torchvision.models.detection.faster_rcnn import FastRCNNPredictor
def get_object_detection_model(num_classes):
    # load a Faster R-CNN object detection model pre-trained on COCO
    model = torchvision.models.detection.fasterrcnn_resnet50_fpn(pretrained=True)

    # get the number of input features for the classifier
    in_features = model.roi_heads.box_predictor.cls_score.in_features
    # replace the pre-trained head with a new one
    model.roi_heads.box_predictor = FastRCNNPredictor(in_features, num_classes)
    return model

device = 'cuda:0'
model = get_object_detection_model(4)  # 3 fruit classes + background
model = model.to(device)

class CustomDataset(Dataset):
    def __init__(self, img_dir=r'D:\Datasets\Apple obj-det\train', transform=None):
        self.img_dir = img_dir
        self.transform = transform
        self.names = os.listdir(img_dir)
        # os.listdir is assumed to return image/annotation files in alternating order
        self.img_names = [os.path.join(img_dir, name) for i, name in enumerate(self.names) if i % 2 == 0]
        self.ann_names = [os.path.join(img_dir, name) for i, name in enumerate(self.names) if i % 2 == 1]
    def __getitem__(self, idx):
        img_name = self.img_names[idx]
        img = Image.open(img_name).convert("RGB")
        ann_name = self.ann_names[idx]
        ann_tree = ET.parse(ann_name)
        boxes = []
        labels = []
        lab_leg = {'apple': 1, 'banana': 2, 'orange': 3}
        for elem in ann_tree.findall('object'):
            labels.append(lab_leg[elem.find('name').text])
            # read the bounding box explicitly so the coordinate order is guaranteed
            bb = elem.find('bndbox')
            boxes.append([int(bb.find(tag).text)
                          for tag in ('xmin', 'ymin', 'xmax', 'ymax')])
        areas = [(i[2] - i[0]) * (i[3] - i[1]) for i in boxes]
        areas = torch.as_tensor(areas, dtype=torch.int64)
        bndbox = torch.as_tensor(boxes, dtype=torch.float32)
        labels = torch.as_tensor(labels, dtype=torch.int64)
        target = {}
        target['boxes'] = bndbox
        target['labels'] = labels
        target['image_id'] = torch.as_tensor([idx], dtype=torch.int64)
        target['area'] = areas
        target['iscrowd'] = torch.zeros((len(labels),), dtype=torch.int64)  # one flag per object, not per image
        if self.transform is not None:
            img = self.transform(img)
        return img, target
    def __len__(self):
        return len(self.img_names)

def get_transform(train):
    # only ToTensor for now; train-time augmentations could be appended here when train is True
    transforms = []
    transforms.append(T.ToTensor())
    return T.Compose(transforms)

cus = CustomDataset(transform=get_transform(True))
test = CustomDataset(img_dir=r'D:\Datasets\Apple obj-det\test', transform=get_transform(True))

# utils.py and engine.py are the torchvision detection reference helpers
# (https://github.com/pytorch/vision/tree/main/references/detection)
import utils
from engine import train_one_epoch, evaluate
data_loader = torch.utils.data.DataLoader(
    cus, batch_size=1, shuffle=True, num_workers=0,
    collate_fn=utils.collate_fn)
test_loader = torch.utils.data.DataLoader(
    test, batch_size=1, shuffle=False, num_workers=0,
    collate_fn=utils.collate_fn)
params = [p for p in model.parameters() if p.requires_grad]
optimizer = torch.optim.SGD(params, lr=0.005,
                            momentum=0.9, weight_decay=0.0005)
lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=3, gamma=0.1)
for epoch in range(10):
    train_one_epoch(model, optimizer, data_loader, device, epoch, print_freq=10)
    lr_scheduler.step()
    evaluate(model, test_loader, device=device)

torch.save(model, 'fruit_det_mod.pth')  # save the whole model (architecture + weights)
mod = torch.load('fruit_det_mod.pth')   # reload it for inference
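
For context, the video inference I mentioned is just a frame-by-frame loop with cv2. A rough sketch of what I mean (the video path and score threshold below are placeholders, not my exact script):

import cv2
import torch

mod.eval()
cap = cv2.VideoCapture(r'D:\some_video.mp4')  # placeholder path
while cap.isOpened():
    ok, frame = cap.read()
    if not ok:
        break
    rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
    tensor = torch.from_numpy(rgb).permute(2, 0, 1).float().div(255).to(device)
    with torch.no_grad():
        pred = mod([tensor])[0]  # detection models take a list of 3xHxW tensors
    for box, score in zip(pred['boxes'], pred['scores']):
        if score > 0.5:  # placeholder confidence threshold
            x1, y1, x2, y2 = box.int().tolist()
            cv2.rectangle(frame, (x1, y1), (x2, y2), (0, 255, 0), 2)
    cv2.imshow('detections', frame)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
cap.release()
cv2.destroyAllWindows()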
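
As for the conversion itself, the route I have been looking at is PyTorch → ONNX → TensorFlow SavedModel → TFLite. Below is a minimal, untested sketch of that path; it assumes the onnx and onnx-tf packages are installed, the file names are placeholders, and Faster R-CNN uses ops that TFLite may not support, so the last step may still fail, which is what I would like to confirm.

import torch
import onnx
from onnx_tf.backend import prepare
import tensorflow as tf

mod = mod.cpu().eval()  # export on CPU so the dummy input and weights share a device
dummy = [torch.rand(3, 480, 640)]  # torchvision detection models take a list of 3xHxW tensors
torch.onnx.export(mod, dummy, 'fruit_det_mod.onnx', opset_version=11)

onnx_model = onnx.load('fruit_det_mod.onnx')
prepare(onnx_model).export_graph('fruit_det_mod_tf')  # writes a TensorFlow SavedModel directory

converter = tf.lite.TFLiteConverter.from_saved_model('fruit_det_mod_tf')
converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS,
                                       tf.lite.OpsSet.SELECT_TF_OPS]  # fall back to TF ops where TFLite has no builtin
with open('fruit_det_mod.tflite', 'wb') as f:
    f.write(converter.convert())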

Hi @Jaredeco, did you have any success with the above-mentioned issue?