ONNX export of a scripted model does not support interpolate with scale_factor

scripted_model = torch.jit.script(model)  # script() takes no example inputs; the tensor was being passed as the deprecated optimize flag
torch.onnx.export(scripted_model.to(device),       # model being run
                  input_image_pytorch.to(device),  # model input (or a tuple for multiple inputs)
                  "scripted_monodepth2.onnx",      # where to save the model (can be a file or file-like object)
                  export_params=True,              # store the trained parameter weights inside the model file
                  input_names=['input'],           # the model's input names
                  output_names=['output'])         # the model's output names

raises the following error:

OnnxExporterError                         Traceback (most recent call last)
Cell In[8], line 26
     24 model = QuantizedDepth(encoder=encoder, decoder=depth_decoder, for_quantization=False).eval().to(device)
     25 scripted_model = torch.jit.script(model, input_image_pytorch.to(device).detach())
---> 26 torch.onnx.export(scripted_model.to(device),               # model being run
     27                   input_image_pytorch.to(device),                         # model input (or a tuple for multiple inputs)
     28                   "scripted_monodepth2.onnx",   # where to save the model (can be a file or file-like object)
     29                   export_params=True,        # store the trained parameter weights inside the model file
     30                   # opset_version=11,          # the ONNX version to export the model to
     31                   # do_constant_folding=True,  # whether to execute constant folding for optimization. buggy in torch == 1.12
     32                   input_names = ['input'],   # the model's input names
     33                   output_names = ['output'], # the model's output names
     34                 #   dynamic_axes={'input' : {0 : 'batch_size'},    # variable length axes
     35                 #                 'output' : {0 : 'batch_size'}}
     36                                 )
     37 print("Size (MB):", os.path.getsize("traced_monodepth2.onnx")/1e6)
     38 print("Size (MB):", os.path.getsize("monodepth2.onnx")/1e6)

File ~/miniconda3/envs/inference/lib/python3.8/site-packages/torch/onnx/utils.py:504, in export(model, args, f, export_params, verbose, training, input_names, output_names, operator_export_type, opset_version, do_constant_folding, dynamic_axes, keep_initializers_as_inputs, custom_opsets, export_modules_as_functions)
    186 @_beartype.beartype
    187 def export(
    188     model: Union[torch.nn.Module, torch.jit.ScriptModule, torch.jit.ScriptFunction],
   (...)
    204     export_modules_as_functions: Union[bool, Collection[Type[torch.nn.Module]]] = False,
    205 ) -> None:
    206     r"""Exports a model into ONNX format.
    207 
    208     If ``model`` is not a :class:`torch.jit.ScriptModule` nor a
   (...)
    501             All errors are subclasses of :class:`errors.OnnxExporterError`.
    502     """
--> 504     _export(
    505         model,
    506         args,
    507         f,
    508         export_params,
    509         verbose,
    510         training,
    511         input_names,
    512         output_names,
    513         operator_export_type=operator_export_type,
    514         opset_version=opset_version,
    515         do_constant_folding=do_constant_folding,
    516         dynamic_axes=dynamic_axes,
    517         keep_initializers_as_inputs=keep_initializers_as_inputs,
    518         custom_opsets=custom_opsets,
    519         export_modules_as_functions=export_modules_as_functions,
    520     )

File ~/miniconda3/envs/inference/lib/python3.8/site-packages/torch/onnx/utils.py:1529, in _export(model, args, f, export_params, verbose, training, input_names, output_names, operator_export_type, export_type, opset_version, do_constant_folding, dynamic_axes, keep_initializers_as_inputs, fixed_batch_size, custom_opsets, add_node_names, onnx_shape_inference, export_modules_as_functions)
   1526     dynamic_axes = {}
   1527 _validate_dynamic_axes(dynamic_axes, model, input_names, output_names)
-> 1529 graph, params_dict, torch_out = _model_to_graph(
   1530     model,
   1531     args,
   1532     verbose,
   1533     input_names,
   1534     output_names,
   1535     operator_export_type,
   1536     val_do_constant_folding,
   1537     fixed_batch_size=fixed_batch_size,
   1538     training=training,
   1539     dynamic_axes=dynamic_axes,
   1540 )
   1542 # TODO: Don't allocate a in-memory string for the protobuf
   1543 defer_weight_export = (
   1544     export_type is not _exporter_states.ExportTypes.PROTOBUF_FILE
   1545 )

File ~/miniconda3/envs/inference/lib/python3.8/site-packages/torch/onnx/utils.py:1115, in _model_to_graph(model, args, verbose, input_names, output_names, operator_export_type, do_constant_folding, _disable_torch_constant_prop, fixed_batch_size, training, dynamic_axes)
   1112 params_dict = _get_named_param_dict(graph, params)
   1114 try:
-> 1115     graph = _optimize_graph(
   1116         graph,
   1117         operator_export_type,
   1118         _disable_torch_constant_prop=_disable_torch_constant_prop,
   1119         fixed_batch_size=fixed_batch_size,
   1120         params_dict=params_dict,
   1121         dynamic_axes=dynamic_axes,
   1122         input_names=input_names,
   1123         module=module,
   1124     )
   1125 except Exception as e:
   1126     torch.onnx.log("Torch IR graph at exception: ", graph)

File ~/miniconda3/envs/inference/lib/python3.8/site-packages/torch/onnx/utils.py:663, in _optimize_graph(graph, operator_export_type, _disable_torch_constant_prop, fixed_batch_size, params_dict, dynamic_axes, input_names, module)
    660     _C._jit_pass_onnx_set_dynamic_input_shape(graph, dynamic_axes, input_names)
    661 _C._jit_pass_onnx_lint(graph)
--> 663 graph = _C._jit_pass_onnx(graph, operator_export_type)
    664 _C._jit_pass_onnx_lint(graph)
    665 _C._jit_pass_lint(graph)

File ~/miniconda3/envs/inference/lib/python3.8/site-packages/torch/onnx/utils.py:1899, in _run_symbolic_function(graph, block, node, inputs, env, operator_export_type)
   1894     if symbolic_fn is not None:
...
    590         value,
    591     )
--> 592 raise errors.OnnxExporterError(message)

OnnxExporterError: Unsupported: ONNX export of operator interpolate (with scales), missing input shape. Please feel free to request support or submit a pull request on PyTorch GitHub: https://github.com/pytorch/pytorch/issues

The model contains a call to torch.nn.functional.interpolate(x, scale_factor=2.0, mode="nearest"), which causes the error. The export works without JIT scripting, i.e. when the eager model is passed to torch.onnx.export directly (the exporter then traces it).
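
A workaround often suggested for this exporter limitation is to pass an explicit output size computed from the runtime shape instead of scale_factor; with an explicit size, the symbolic function no longer needs a statically known input shape. A minimal sketch, assuming 4-D NCHW inputs (the Upsample2x module name is mine, not part of the model above):

import torch
import torch.nn as nn
import torch.nn.functional as F

class Upsample2x(nn.Module):
    # Drop-in replacement for F.interpolate(x, scale_factor=2.0, mode="nearest").
    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # Compute the target size as a List[int] from the runtime shape.
        # This avoids the scale_factor code path, whose ONNX symbolic
        # requires a static input shape under scripting.
        return F.interpolate(x, size=[x.shape[2] * 2, x.shape[3] * 2],
                             mode="nearest")

Alternatively, tracing the model with torch.jit.trace instead of scripting it also avoids the error, since tracing records concrete tensor shapes that the interpolate symbolic can use.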