Am I right to share weights between two models?

I want to share the ResBlock class below between two models:

import torch.nn as nn

class ResBlock(nn.Module):
    def __init__(self):
        super(ResBlock, self).__init__()
        self.res_block = nn.Sequential(
            nn.ReLU(True),
            nn.Conv1d(DIM, DIM, 5, padding=2),  # nn.Linear(DIM, DIM),
            nn.ReLU(True),
            nn.Conv1d(DIM, DIM, 5, padding=2),  # nn.Linear(DIM, DIM),
        )

    def forward(self, input):
        output = self.res_block(input)
        # residual connection with the block output scaled by 0.3
        return input + (0.3 * output)
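
My understanding is that weights are only shared when both models hold the same module instance; calling ResBlock() twice creates two independent sets of parameters. A minimal sketch of what I mean (ModelA, ModelB, and shared_block are just illustrative names):

# one ResBlock instance reused by two models -> its parameters are shared
shared_block = ResBlock()

class ModelA(nn.Module):
    def __init__(self, shared):
        super(ModelA, self).__init__()
        self.block = shared  # same instance, same parameters

    def forward(self, x):
        return self.block(x)

class ModelB(nn.Module):
    def __init__(self, shared):
        super(ModelB, self).__init__()
        self.block = shared  # same instance, same parameters

    def forward(self, x):
        return self.block(x)

model_a = ModelA(shared_block)
model_b = ModelB(shared_block)
# both models now update the same ResBlock weights during training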

So I wrote the following layer definitions, using shared 'base' modules for the blocks I want both branches to share:

    # branch-specific input convolutions and first residual blocks
    self.conv0_a = nn.Conv1d(len(charmap1), DIM, 1)
    self.conv0_b = nn.Conv1d(len(charmap2), DIM, 1)
    self.block1_a = ResBlock()
    self.block1_b = ResBlock()
    # 'base' blocks that I intend both branches to share
    self.base2 = ResBlock()
    self.base3 = ResBlock()
    self.base4 = ResBlock()
    self.base5 = ResBlock()
    self.linear = nn.Linear(SEQ_LEN*DIM, 2)
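
In case it helps to see the whole picture, this is roughly how I imagine wiring these layers in forward (just a sketch of my intent; DIM, SEQ_LEN, charmap1, and charmap2 are defined elsewhere in my code):

    def forward(self, input_a, input_b):
        # each branch uses its own input conv and first block
        out_a = self.block1_a(self.conv0_a(input_a))
        out_b = self.block1_b(self.conv0_b(input_b))
        # both branches then pass through the shared 'base' blocks
        for block in (self.base2, self.base3, self.base4, self.base5):
            out_a = block(out_a)
            out_b = block(out_b)
        # flatten (batch, DIM, SEQ_LEN) -> (batch, DIM*SEQ_LEN) and classify
        out_a = self.linear(out_a.view(out_a.size(0), -1))
        out_b = self.linear(out_b.view(out_b.size(0), -1))
        return out_a, out_b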

Am I right?