How to create a Dataset with samples of different sizes?

I’m trying to create a point cloud dataset, but the number of points may differ between samples.

The Dataset class:

// Point-cloud dataset for PFG data, built on libtorch's CRTP Dataset base.
// NOTE(review): data_ and targets_ are single stacked tensors, which forces
// every sample to have the same number of points — see read_data.
class PFGDataset : public torch::data::Dataset<PFGDataset> {
public:
    // Which split of the data to load.
    enum Mode {
        kTrain,
        kTest
    };

    // Eagerly loads the chosen split from _root (delegates to read_data).
    explicit PFGDataset(const std::string& _root, Mode _mode = Mode::kTrain);
    ~PFGDataset();
    // Reads every sample under _root and returns {point clouds, labels}.
    static std::pair<torch::Tensor, torch::Tensor> read_data(const std::string& _root, bool _train);
    // Returns the _index-th sample as a {data, target} Example.
    torch::data::Example<> get(size_t _index) override;
    // Number of samples in the dataset.
    torch::optional<size_t> size() const override;
    bool is_train() const noexcept;
    const torch::Tensor& data() const;
    const torch::Tensor& targets() const;

private:
    torch::Tensor data_;    // stacked per-point xyz coordinates
    torch::Tensor targets_; // stacked per-point labels
    Mode mode_;             // split this instance was constructed with
};

The read_data method is used to initialize the data_ and targets_ tensors:

PFGDataset::PFGDataset(const std::string& _root, Mode _mode)
    : mode_(_mode)
{
    // Read the whole split eagerly and keep it in memory as two tensors.
    auto [points, labels] = read_data(_root, mode_ == PFGDataset::Mode::kTrain);
    data_ = std::move(points);
    targets_ = std::move(labels);
}

The read_data method itself:

std::pair<torch::Tensor, torch::Tensor> PFGDataset::read_data(const std::string& _root, bool _train)
{
    int i = 0;
    std::string ext = ".pcd";
    const auto num_samples = _train ? kTrainSize : kTestSize;
    const auto folder = _train ? _root + "/train" : _root + "/valid";
    auto num_points = 215146; // num_points of 1st sample
    auto point_clouds = torch::empty({ num_samples, num_points, 3 }, torch::kFloat); // xyz of points
    auto targets = torch::empty({ num_samples, num_points, 1 }, torch::kLong); // labels of points

    /*Load every labeled point cloud under directory*/
    std::cout << "Load every labeled point cloud under directory..." << std::endl;
    for (const auto& p : fs::directory_iterator(folder)) {
        if (p.path().extension() != ext) {
            continue;
        }
        pcl::PointCloud<pcl_ext::PointXYZLO>::Ptr data_cloud;
        data_cloud = pcl::make_shared<pcl::PointCloud<pcl_ext::PointXYZLO>>();
        auto flag = pcl::io::loadPCDFile(p.path().string(), *data_cloud); // Num of points in each PCD files may be differ
        if (flag != -1) {
            /*Input Tensor with sizes{Num,3}*/
            auto xyz = pointCloudToTensor<pcl_ext::PointXYZLO>(data_cloud);
            std::cout << "point_clouds[" << i << "][sizes]: " << xyz.sizes() << std::endl;
            point_clouds[i] = xyz; // xyz of points in i-th sample

            /*Target Tensor with sizes{Num,1}*/
            auto label_obj_map = Eigen::Map<Eigen::MatrixXi, 0, Eigen::Stride<sizeof(pcl_ext::PointXYZLO) / sizeof(int), 1>>(&data_cloud->points[0].label, 2, data_cloud->points.size());
            Eigen::VectorXi label = label_obj_map.row(0);
            torch::Tensor tensor_label = torch::from_blob(label.data(), label.rows(), torch::TensorOptions().dtype(torch::kInt)).unsqueeze(-1);
            std::cout << "tensor_label[" << i << "][sizes]: " << tensor_label.sizes() << std::endl;
            targets[i] = tensor_label.to(torch::kLong); // labels of points in i-th sample
        } else {
            continue;
        }

        if (i >= num_samples - 1)
            break;
        i++;
    }

    return { point_clouds, targets };
}

The terminal output:

Load every labeled point cloud under directory…
point_clouds[0][sizes]: [215146, 3]
tensor_label[0][sizes]: [215146, 1]
point_clouds[1][sizes]: [213377, 3]
terminate called after throwing an instance of ‘c10::Error’
what(): The size of tensor a (215146) must match the size of tensor b (213377) at non-singleton dimension 0

Is there any way to allow the dataset to have samples of different sizes?