Unable to save the model in TorchScript module format

Hi, I’m trying to save the model in TorchScript module format. Unfortunately, I’m getting the error below:

---------------------------------------------------------------------------
NotSupportedError                         Traceback (most recent call last)
/tmp/ipykernel_127656/3223171615.py in <module>
----> 1 scripted_model = torch.jit.script(model)

~/miniconda3/envs/tensorrt/lib/python3.7/site-packages/torch/jit/_script.py in script(obj, optimize, _frames_up, _rcb, example_inputs)
   1264         obj = call_prepare_scriptable_func(obj)
   1265         return torch.jit._recursive.create_script_module(
-> 1266             obj, torch.jit._recursive.infer_methods_to_compile
   1267         )
   1268 

~/miniconda3/envs/tensorrt/lib/python3.7/site-packages/torch/jit/_recursive.py in create_script_module(nn_module, stubs_fn, share_types, is_tracing)
    452     if not is_tracing:
    453         AttributeTypeIsSupportedChecker().check(nn_module)
--> 454     return create_script_module_impl(nn_module, concrete_type, stubs_fn)
    455 
    456 def create_script_module_impl(nn_module, concrete_type, stubs_fn):

~/miniconda3/envs/tensorrt/lib/python3.7/site-packages/torch/jit/_recursive.py in create_script_module_impl(nn_module, concrete_type, stubs_fn)
    464     """
    465     cpp_module = torch._C._create_module_with_type(concrete_type.jit_type)
--> 466     method_stubs = stubs_fn(nn_module)
    467     property_stubs = get_property_stubs(nn_module)
    468     hook_stubs, pre_hook_stubs = get_hook_stubs(nn_module)

~/miniconda3/envs/tensorrt/lib/python3.7/site-packages/torch/jit/_recursive.py in infer_methods_to_compile(nn_module)
    733     stubs = []
    734     for method in uniqued_methods:
--> 735         stubs.append(make_stub_from_method(nn_module, method))
    736     return overload_stubs + stubs
    737 

~/miniconda3/envs/tensorrt/lib/python3.7/site-packages/torch/jit/_recursive.py in make_stub_from_method(nn_module, method_name)
     64     # In this case, the actual function object will have the name `_forward`,
     65     # even though we requested a stub for `forward`.
---> 66     return make_stub(func, method_name)
     67 
     68 

~/miniconda3/envs/tensorrt/lib/python3.7/site-packages/torch/jit/_recursive.py in make_stub(func, name)
     49 def make_stub(func, name):
     50     rcb = _jit_internal.createResolutionCallbackFromClosure(func)
---> 51     ast = get_jit_def(func, name, self_name="RecursiveScriptModule")
     52     return ScriptMethodStub(rcb, ast, func)
     53 

~/miniconda3/envs/tensorrt/lib/python3.7/site-packages/torch/jit/frontend.py in get_jit_def(fn, def_name, self_name, is_classmethod)
    262         pdt_arg_types = type_trace_db.get_args_types(qualname)
    263 
--> 264     return build_def(parsed_def.ctx, fn_def, type_line, def_name, self_name=self_name, pdt_arg_types=pdt_arg_types)
    265 
    266 # TODO: more robust handling of recognizing ignore context manager

~/miniconda3/envs/tensorrt/lib/python3.7/site-packages/torch/jit/frontend.py in build_def(ctx, py_def, type_line, def_name, self_name, pdt_arg_types)
    300                        py_def.col_offset + len("def"))
    301 
--> 302     param_list = build_param_list(ctx, py_def.args, self_name, pdt_arg_types)
    303     return_type = None
    304     if getattr(py_def, 'returns', None) is not None:

~/miniconda3/envs/tensorrt/lib/python3.7/site-packages/torch/jit/frontend.py in build_param_list(ctx, py_args, self_name, pdt_arg_types)
    335             if arg is not None:
    336                 ctx_range = build_expr(ctx, arg).range()
--> 337                 raise NotSupportedError(ctx_range, _vararg_kwarg_err)
    338 
    339     # List of Tuple of args and type as inferred by profile directed typing

NotSupportedError: Compiled functions can't take variable number of arguments or use keyword-only arguments with defaults:
  File "/home/iamalien/Desktop/my_files/semantic_segmentation_example/semantic-segmentation-pytorch/sage_example/code/mit_semseg/models/models.py", line 29
    def forward(self, feed_dict, *, segSize=None):
                                            ~~~~ <--- HERE
        # training
        if segSize is None:

This is the GitHub repository I’m using to build the model. Could you please help me save the model in TorchScript format?
@ptrblck