ONNX export failed on unsafe_chunk

torch/onnx/symbolic_helper.py:258: UserWarning: ONNX export failed on unsafe_chunk because unknown dimension size not supported

I was trying to export the Tacotron2 model provided by torchaudio:

import torch
import torchaudio
import onnx

# Pick the inference device. NOTE: `device` was used below but never
# defined, which raised a NameError before the ONNX export was reached.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

# Pretrained phoneme-based Tacotron2 + WaveRNN pipeline (LJSpeech).
bundle = torchaudio.pipelines.TACOTRON2_WAVERNN_PHONE_LJSPEECH
processor = bundle.get_text_processor()
tacotron2 = bundle.get_tacotron2().to(device)

text = "Hello world! Text to speech!"

# Run one sample inference to produce example inputs for the export below.
with torch.inference_mode():
    processed, lengths = processor(text)
    processed = processed.to(device)
    lengths = lengths.to(device)
    spec, _, _ = tacotron2.infer(processed, lengths)


class OnnxWrapper(torch.nn.Module):
    """Thin ``nn.Module`` wrapper around a Tacotron2 model.

    Exposes only the spectrogram output of ``infer`` so that
    ``torch.onnx.export`` can trace a single-output forward pass.
    """

    def __init__(self, tacotron):
        super().__init__()
        self.tacotron = tacotron

    def forward(self, processed, lengths):
        # Keep only the spectrogram; drop lengths/alignment outputs.
        spectrogram, _spec_lengths, _alignments = self.tacotron.infer(
            processed, lengths
        )
        return spectrogram


# Wrap the model so the exported graph returns only the spectrogram.
onnx_wrapper = OnnxWrapper(tacotron2).eval()

with torch.inference_mode():
    # Bug fix: the first argument was `to`, an undefined name; the module
    # to export is the wrapper constructed above.
    torch.onnx.export(onnx_wrapper,
                      (processed, lengths),
                      "tacotron2.onnx",
                      training=torch.onnx.TrainingMode.EVAL,
                      export_params=True,
                      opset_version=11,
                      do_constant_folding=True,
                      input_names=['processed', 'lengths'],
                      output_names=['spec'])

From grepping pytorch’s source code, it seems that unsafe_chunk is only called in RNN.cpp for the LSTM and GRU modules.

However, there is no problem exporting LSTM and GRU modules to onnx, so I’m confused by what’s causing the error.