How to get the model parameters while training with DataParallel

While in training procedure, I want to get and set the parameters of BN layer. The following code works on single GPU, but failed with multi. How could I fixe the code?

def get_adabn_params(model):
 new_dict = OrderedDict()
 for name, value in model.state_dict().items():
   if 'running_mean' in name or 'running_var' in name:
     new_dict[name] =  value
 return new_dict

def reset_adabn_params(model):
 new_dict = OrderedDict()
 for name, value in model.state_dict().items():
   if 'running_mean' in name:
     new_dict[name] = 0
   if 'running_var' in name:
     new_dict[name] =  1

 for key,value in new_dict.items():
   model.state_dict()[key].copy_(torch.from_numpy(np.array(value)))