TypeError: 'NoneType' object is not iterable

Excuse me, where is this mistake?
微信图片_20200315105107

for step, (s_data, s_label) in enumerate(src_data_loader):
            
            s_data = make_variable(s_data)
            s_label = make_variable(s_label.squeeze_())   
            
            # zero gradients for optimizer
            optimizer.zero_grad()

            # compute loss for critic
            preds = classifier(encoder(s_data))
            loss = criterion(preds, s_label)

            # optimize source classifier
            loss.backward()
            optimizer.step()

            # print step info
            if ((step + 1) % params.log_step_pre == 0):
                print("Epoch [{}/{}] Step [{}/{}]: loss={}"
                      .format(epoch + 1,
                              params.num_epochs_pre,
                              step + 1,
                              len(src_data_loader),
                              loss.item()))
src_data_loader = get_data_loader(params.src_dataset_name, batch_size=params.batch_size, train=True)

def get_data_loader(name, batch_size, train=True):
    """Get data loader by name."""
    if name == "maria":
        return get_lab_src(batch_size)
    elif name == "sandy":
        return get_lab_tgt(batch_size)
def get_lab_src(batch_size):

    src_dataset_root = scipy.io.loadmat('E:\\ADDA\\adda-lab\\datasets\\lab\\test_target_domain_maria.mat')
    s_data = src_dataset_root['testdata'] 
    s_data = torch.from_numpy(s_data).float()
    s_data_loader = torch.utils.data.DataLoader(s_data, batch_size=batch_size, shuffle=True, drop_last=True, num_workers=1)

    s_label = src_dataset_root['testlabel'] 
    s_label = torch.from_numpy(s_label).long() 
    s_label_loader = torch.utils.data.DataLoader(s_label, batch_size=batch_size, shuffle=True, drop_last=True, num_workers=1)


def get_lab_tgt(batch_size):

    tgt_dataset_root = h5py.File('E:\\ADDA\\adda-lab\\datasets\\lab\\test_target_domain_sandy.mat','r')
    t_data = tgt_dataset_root['data'][:]
    t_data = t_data.transpose(3,2,1,0)
    t_data_loader = torch.utils.data.DataLoader(t_data, batch_size=batch_size, shuffle=True, drop_last=True, num_workers=1)

    t_label = tgt_dataset_root['label'][:]
    t_label = np.transpose(t_label)
    t_label_loader = torch.utils.data.DataLoader(t_label, batch_size=batch_size, shuffle=True, drop_last=True, num_workers=1)

get_lab_src and get_lab_tgt does not return anything.

1 Like

Thank you! I neglected this.However, errors still occur. Is there any problem with my program?
微信图片_20200315124355

I don’t know what you’ve returned, but ideally you should have only one Dataloader which outputs both data and label instead of two Dataloaders output both respectively.

What I returned was this.I don’t quite understand the suggestion you follow. Can you tell me in detail which part I should modify?

return s_data_loader, s_label_loader

That’s not how dataset works, you only need one dataloader to return you all the data.
Instead of using:

s_data_loader = torch.utils.data.DataLoader(s_data, batch_size=batch_size, shuffle=True, drop_last=True, num_workers=1)
s_label_loader = torch.utils.data.DataLoader(s_label, batch_size=batch_size, shuffle=True, drop_last=True, num_workers=1)

Use something like:

dataset = torch.utils.data.TensorDataset(torch.Tensor(s_data), torch.Tensor(s_label))
loader = torch.utils.data.DataLoader(dataset, batch_size=batch_size, shuffle=True, drop_last=True, num_workers=1)
return loader

The documentation is here for torch.utils.data.TensorDataset

I encountered this error after modification. What is going on?
微信图片_20200315145538

def get_lab_src(batch_size):

    src_dataset_root = scipy.io.loadmat('E:\\ADDA\\adda-lab\\datasets\\lab\\test_target_domain_maria.mat')
    s_data = src_dataset_root['testdata'] 
    s_data = torch.from_numpy(s_data).float()
    

    s_label = src_dataset_root['testlabel'] 
    s_label = torch.from_numpy(s_label).long() 

    s_dataset = torch.utils.data.TensorDataset(s_data, s_label)
    s_loader = torch.utils.data.DataLoader(dataset=s_dataset, batch_size=batch_size, shuffle=True, drop_last=True, num_workers=1)

    return s_loader

Can you provide more information about which line causes the error?
Something like the traceback of the error would help.

I found the error. Thank you very much!

1 Like