Replace the fully connected layers of my model with an SVM

Hello everyone,
Can someone help me with this? I want to replace the FC layers (fc1 and fc2) of my model with an SVM, but I really don't know how to go about it. I'd appreciate your guidance. Thank you in advance.

This is the model:

import torch
import torchvision # torch package for vision related things
import torch.nn.functional as F  # Parameterless functions, like (some) activation functions
import torchvision.datasets as datasets  # Standard datasets
import torchvision.transforms as transforms  # Transformations we can perform on our dataset for augmentation
from torch import optim  # For optimizers like SGD, Adam, etc.
from torch import nn  # All neural network modules
from torch.utils.data import DataLoader  # Gives easier dataset management by creating mini batches etc.
from tqdm import tqdm  # For nice progress bar!

class CNNskl(nn.Module):
    def __init__(self, in_channels=3, num_classes=120):
        super(CNNskl, self).__init__()
        self.conv1 = nn.Conv2d(
            in_channels=in_channels,
            out_channels=128,
            kernel_size=3,
            stride=1,
            padding=2,
        )
        self.maxpool1 = nn.MaxPool2d(kernel_size=2)
        self.conv2 = nn.Conv2d(
            in_channels=128,
            out_channels=64,
            kernel_size=(3, 3),
            stride=1,
            padding=2,
        )

        self.maxpool2 = nn.MaxPool2d(kernel_size=2)

        self.conv3 = nn.Conv2d(
            in_channels=64,
            out_channels=32,
            kernel_size=3,
            stride=2,
            padding=2,
        )
        self.maxpool3 = nn.MaxPool2d(kernel_size=2)
        self.fc1 = nn.Linear(9248, 200)
        self.fc2 = nn.Linear(200, num_classes)
        self.dropout = nn.Dropout(0.5)

    def forward(self, x):
        # shape notes from earlier experiments:
        # 64, 49, 100, 3
        # 64, 1200, 4, 9, 3
        # b, n, m, z, h, w = x.shape
        # x = x.view(b, z, h*n*m, w)

        b, n, H, W = x.shape  # x is expected as (batch, channels, H, W)
        # print(x.shape)
        x = self.conv1(x)
        x = F.relu(x)
        x = self.maxpool1(x)
        x = self.conv2(x)
        x = F.relu(x)
        x = self.maxpool2(x)
        x = self.conv3(x)
        x = F.relu(x)
        x = self.maxpool3(x)
        # b1, c, h1, w1 = x.shape  (yoik's suggestion)
        x = x.reshape(x.shape[0], -1)  # flatten the conv features before the FC layers
        # x = self.dropout(x)
        # x = x.view(b1*c*h1*w1, -1)  (this is what yoik suggested)
        # x = x.mean(dim=1)  (yoik's suggestion)
        x = self.fc1(x)  # fc1: the first FC layer I want to replace
        x = F.relu(x)
        # x = self.dropout(x)
        # print(x.shape)
        x = self.fc2(x)  # fc2: the classification layer I want to replace
        # print(x.shape)
        return x
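
One approach I've been considering (in case it helps clarify what I'm asking) is to keep the conv layers as a feature extractor, drop fc1/fc2, and fit a scikit-learn SVM on the flattened conv features. Here is a rough, untested sketch of that idea; the extract_features helper and the train_loader/test_loader names are just my own placeholders, not part of the model above:

import numpy as np
import torch
from sklearn.svm import LinearSVC

def extract_features(model, loader, device="cpu"):
    """Run only the conv part of CNNskl (everything before fc1) and
    collect the flattened feature vectors plus labels for a whole loader."""
    model.eval()
    feats, labels = [], []
    with torch.no_grad():
        for images, targets in loader:
            x = images.to(device)
            x = model.maxpool1(torch.relu(model.conv1(x)))
            x = model.maxpool2(torch.relu(model.conv2(x)))
            x = model.maxpool3(torch.relu(model.conv3(x)))
            x = x.reshape(x.shape[0], -1)  # same flatten as in forward()
            feats.append(x.cpu().numpy())
            labels.append(targets.numpy())
    return np.concatenate(feats), np.concatenate(labels)

# X_train, y_train = extract_features(model, train_loader)
# X_test, y_test = extract_features(model, test_loader)
# svm = LinearSVC(C=1.0, max_iter=10000)  # linear SVM on top of the conv features
# svm.fit(X_train, y_train)
# print("SVM accuracy:", svm.score(X_test, y_test))

The other option I've seen mentioned is to keep a single nn.Linear head and train it with a multi-class hinge loss (nn.MultiMarginLoss) instead of cross-entropy, which behaves like a linear SVM on the learned features, but I haven't tried that either. Is one of these the right direction?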