Some questions with custom Dataset

Hello everyone:
I want to rewrite a dataset for a binary classification problem. For there is a imbalance between positive and negative samples, I augment the positive with 300 times. I defined two variables self.pos_num and self.neg_num in function init() to keep the numbers of pos/neg samples which have been taken. and updated them in function getitem(). Howerer, I found some mistakes happened:after one epoch,the number of pos and neg samples taken for training are not match the truth number of them, and with the increase of accuracy in training stage.in valuation stage ,the accuracy of the same samples feeded in training stage are change randomly.
Here is my code:

class DataFalsePositiveClassifier(Dataset):
    def __init__(self, split, config, phase='train'):  
        assert (phase == 'train' or phase == 'val' or phase == 'test')
        self.augtype = config['augtype']
        self.datadir = config['datadir']
        bboxpath = config['bboxpath']
        test_bboxpath = config['test_bboxpath']
        self.phase = phase
        self.pos_boxes = []#Position and diameterof a nodule (z,x,y,d)
        self.neg_boxes = []
        self.all_boxes = []
        self.all_labels = []
        self.pos_num = 0
        self.neg_num = 0
        idcs = split  
        if phase != 'test':
            for idx in idcs:
                pbb = np.load(os.path.join(bboxpath[0], idx + '_label.npy'))
                if pbb[0][3] != 0:
                    for box in pbb:
                        box = list(box)
                        box = [idx] + box #id x y z d
                        self.pos_boxes.append(box)
                

                pbb = np.load(os.path.join(bboxpath[1], idx + '_label.npy'))
                for box in pbb:
                    box = list(box)
                    box = [idx] + box
                    self.neg_boxes.append(box)
           
        else:
            for idx in idcs:
                pbb = np.load(os.path.join(test_bboxpath, idx + '_pbb.npy'))
                for box in pbb:
                    box = list(box)
                    prob = sigmoid(box[0])
                    if prob>0.45:
                        box = [idx] + box[1:]
                        self.all_boxes.append(box)
            print(len(self.all_boxes))
        self.crop = simpleCrop(config, phase)

    def __getitem__(self, idx, split=None):
        #if self.phase == 'train':
        if self.phase == 'train':
            flag = np.random.randint(3)
            if self.neg_num >= len(self.neg_boxes):
                pbb = self.pos_boxes[self.pos_num%len(self.pos_boxes)]
                pbb_label = np.array([1])
                self.pos_num += 1
            elif self.pos_num >= len(self.pos_boxes)*300:
                pbb = self.neg_boxes[self.neg_num]
                pbb_label = np.array([0])
                self.neg_num += 1
            elif flag<2:
                pbb = self.neg_boxes[self.neg_num]
                pbb_label = np.array([0])
                self.neg_num+=1
            else:
                pbb = self.pos_boxes[self.pos_num%len(self.pos_boxes)]
                pbb_label = np.array([1])
                self.pos_num+=1
     
        elif self.phase == 'val':
            if idx>=len(self.pos_boxes):
                pbb = self.neg_boxes[idx-len(self.pos_boxes)]
                pbb_label = np.array([0])
            else:
                pbb = self.pos_boxes[idx]
                pbb_label = np.array([1])
        else:
            pbb = self.all_boxes[idx]
        target = pbb[1:]
        uid = pbb[0]

        img = np.load(os.path.join(self.datadir, '%s_clean.npy' % uid))

        if self.phase =='train':
            crops = self.crop(img, target, pbb_label)
        else:
            crops = self.crop(img, target)

        if self.phase == 'train' and pbb_label == 1:
            result = []
            for crop in crops:
                after_crop = augment(crop, ifflip=self.augtype['flip'], ifrotate=self.augtype['rotate'],
                               ifswap=self.augtype['swap'])
                result.append(after_crop)
            crop1,crop2 = result
        else:
            crop1,crop2 = crops
        crop1 = crop1.astype(np.float32)
        crop2 = crop2.astype(np.float32)
        if self.phase != 'test':
            return torch.from_numpy(crop1).float(), torch.from_numpy(crop2).float(),torch.from_numpy(pbb_label).float()
        else:
            pbb[0]=int(pbb[0])
            return torch.from_numpy(crop1).float(), torch.from_numpy(crop2).float(),torch.from_numpy(np.array(pbb))

    def __len__(self):
        if self.phase == 'train':
            return len(self.pos_boxes)*300+len(self.neg_boxes)
        elif self.phase =='val':
            #return len(self.pos_boxes)+len(self.neg_boxes)
            return len(self.pos_boxes)*5
        else:
            return len(self.all_boxes)
cls_net = DataParallel(cls_net)
dataset = DataFalsePositiveClassifier(trainsplit, config, phase='train')
dataloader = DataLoader(dataset, batch_size=args.batch_size,
                                       shuffle=True, num_workers=args.workers, pin_memory=True)
val_dataset = DataFalsePositiveClassifier(trainsplit, config, phase='val')
val_dataloader = DataLoader(val_dataset, batch_size=args.batch_size,
                                shuffle=False, num_workers=args.workers, pin_memory=True)
 for epoch in range(53,epochs+1):
     train_cls(dataloader, cls_net, loss, epoch, optimizer, save_freq, save_dir,have_cuda)
     val_cls(val_dataloader, cls_net, have_cuda)

Train, epoch 40, mean_loss 0.0133, mean_acc 0.9840, tpn 362904, tnn 730073, fpn 11512, fnn 6285, time 2983.85
Val, mean_acc 0.7582, tpn 448, tnn 4048, fpn 696, fnn 738
Train, epoch 41, mean_loss 0.0132, mean_acc 0.9842, tpn 363028, tnn 730168, fpn 11426, fnn 6152, time 2982.28
Val, mean_acc 0.5521, tpn 568, tnn 2706, fpn 2038, fnn 618
Train, epoch 42, mean_loss 0.0130, mean_acc 0.9852, tpn 363409, tnn 730981, fpn 10607, fnn 5777, time 2978.14
Val, mean_acc 0.3627, tpn 761, tnn 1390, fpn 3354, fnn 425
Train, epoch 43, mean_loss 0.0126, mean_acc 0.9868, tpn 364057, tnn 732070, fpn 9509, fnn 5138, time 2982.56
Val, mean_acc 0.6877, tpn 553, tnn 3525, fpn 1219, fnn 633
Train, epoch 44, mean_loss 0.0120, mean_acc 0.9895, tpn 364923, tnn 734158, fpn 7545, fnn 4148, time 2987.02
Val, mean_acc 0.4970, tpn 614, tnn 2333, fpn 2411, fnn 572
Train, epoch 45, mean_loss 0.0123, mean_acc 0.9881, tpn 364609, tnn 733001, fpn 8622, fnn 4542, time 2984.24
Val, mean_acc 0.2852, tpn 886, tnn 805, fpn 3939, fnn 300
Train, epoch 46, mean_loss 0.0119, mean_acc 0.9897, tpn 365131, tnn 734151, fpn 7429, fnn 4063, time 2986.59
Val, mean_acc 0.6408, tpn 581, tnn 3219, fpn 1525, fnn 605
Train, epoch 47, mean_loss 0.0118, mean_acc 0.9901, tpn 365374, tnn 734431, fpn 7133, fnn 3836, time 2983.04
Val, mean_acc 0.5727, tpn 674, tnn 2722, fpn 2022, fnn 512
Train, epoch 48, mean_loss 0.0116, mean_acc 0.9908, tpn 365660, tnn 734857, fpn 6712, fnn 3545, time 2983.45
Val, mean_acc 0.5830, tpn 601, tnn 2856, fpn 1888, fnn 585
Train, epoch 49, mean_loss 0.0115, mean_acc 0.9912, tpn 365843, tnn 735176, fpn 6391, fnn 3364, time 2983.26
Val, mean_acc 0.4477, tpn 725, tnn 1930, fpn 2814, fnn 461
Train, epoch 50, mean_loss 0.0114, mean_acc 0.9920, tpn 366086, tnn 735833, fpn 5755, fnn 3100, time 2990.12
Val, mean_acc 0.7137, tpn 622, tnn 3610, fpn 1134, fnn 564