Custom methods in DistributedDataParallel

A little more detail on my method. The pseudo-code is:

class model(nn.Module):
  """Encoder/decoder model with an auxiliary MLP classifier head.

  forward() only runs the encode -> decode path; classify() is a
  separate entry point used outside the DDP forward pass.
  """
  def __init__(self) :
    super(model, self).__init__()
    self.encoder = Encoder()
    self.decoder = Decoder()
    self.mlp = MLP()
  def encode(self, x):
    # Map input x to its latent representation.
    return self.encoder(x)
  def decode(self, x): 
    # Reconstruct an output from a latent representation.
    return self.decoder(x)
  def classify(self, a, b):  # fixed: original was missing the trailing colon (SyntaxError)
    # Score a pair of latent codes with the MLP head.
    return self.mlp(a, b)
  def forward(self, x):
    # Returns both the latent code and the decoded output so callers
    # can perturb the latent and re-decode it.
    enc = self.encode(x)
    out = self.decode(enc)
    return enc, out
# this is my main training script
# NOTE(review): `model` here is presumably the DistributedDataParallel wrapper
# (hence `.module` below) — confirm. Only this call goes through DDP's
# forward, so only it triggers gradient synchronization hooks.
enc, out = model(x)
enc2 = enc + d  # d is some random perturbation added to the latent code
# Calls on model.module bypass the DDP wrapper: they use the same (replicated)
# parameters, but DDP does not hook these calls for gradient reduction.
out2 = model.module.decode(enc2)
pred = model.module.classify(enc, enc2)

There is a bunch of other stuff, but in this scenario, is my decode function using the parameters in model? Would this be an issue? There are no errors when running.