Excuse me — where is the mistake in this code? (Answer: the data and labels are loaded into two independently shuffled DataLoaders, so batches of data and labels no longer correspond, and the loader functions never return anything.)
# Source-classifier pretraining loop: one pass over the source loader,
# optimizing encoder+classifier with a supervised loss.
# NOTE(review): requires src_data_loader to yield aligned (data, label)
# pairs — i.e. a loader over a TensorDataset, not two separate loaders.
for step, (s_data, s_label) in enumerate(src_data_loader):
    # Wrap the batch (legacy Variable-era PyTorch helper).
    s_data = make_variable(s_data)
    # squeeze_ drops the trailing singleton dim so labels fit the loss.
    s_label = make_variable(s_label.squeeze_())

    # Reset accumulated gradients before this step's backward pass.
    optimizer.zero_grad()

    # Forward pass: encode, classify, compute supervised loss.
    preds = classifier(encoder(s_data))
    loss = criterion(preds, s_label)

    # Backpropagate and update parameters.
    loss.backward()
    optimizer.step()

    # Periodic progress report.
    if (step + 1) % params.log_step_pre == 0:
        print("Epoch [{}/{}] Step [{}/{}]: loss={}"
              .format(epoch + 1,
                      params.num_epochs_pre,
                      step + 1,
                      len(src_data_loader),
                      loss.item()))
# Build the source-domain training loader from the configured dataset name.
# NOTE(review): this line must execute BEFORE the training loop that
# iterates src_data_loader — in this paste it appears after it.
src_data_loader = get_data_loader(params.src_dataset_name, batch_size=params.batch_size, train=True)
def get_data_loader(name, batch_size, train=True):
    """Get a data loader by dataset name.

    Args:
        name: dataset identifier ("maria" for source, "sandy" for target).
        batch_size: batch size passed through to the underlying loader.
        train: kept for interface compatibility; not used by the
            current loaders (both load the test split).

    Returns:
        A torch DataLoader yielding (data, label) batches.

    Raises:
        ValueError: if `name` is not a known dataset (the original
            silently fell through and returned None).
    """
    if name == "maria":
        return get_lab_src(batch_size)
    elif name == "sandy":
        return get_lab_tgt(batch_size)
    raise ValueError("Unknown dataset name: {!r}".format(name))
def get_lab_src(batch_size):
    """Build the source-domain ("maria") data loader.

    This is where the original mistake lives: it created TWO DataLoaders
    (one for data, one for labels), each with shuffle=True. The two
    loaders shuffle independently, so a data batch and a label batch no
    longer correspond to the same samples. It also returned neither
    loader, so the function produced None.

    Fix: zip data and labels into one TensorDataset and return a single
    DataLoader — shuffling then permutes (data, label) pairs together.

    Args:
        batch_size: number of samples per batch.

    Returns:
        DataLoader yielding aligned (data, label) batches.
    """
    mat = scipy.io.loadmat(
        'E:\\ADDA\\adda-lab\\datasets\\lab\\test_target_domain_maria.mat')
    s_data = torch.from_numpy(mat['testdata']).float()
    s_label = torch.from_numpy(mat['testlabel']).long()
    dataset = torch.utils.data.TensorDataset(s_data, s_label)
    return torch.utils.data.DataLoader(dataset,
                                       batch_size=batch_size,
                                       shuffle=True,
                                       drop_last=True,
                                       num_workers=1)
def get_lab_tgt(batch_size):
    """Build the target-domain ("sandy") data loader.

    Same bug as the source loader: data and labels were placed in two
    independently shuffled DataLoaders (destroying their pairing) and
    nothing was returned. The h5py file was also never closed.

    Fix: read both arrays inside a `with` block, convert them to tensors,
    and return ONE DataLoader over a TensorDataset so each batch yields
    an aligned (data, label) pair.

    Args:
        batch_size: number of samples per batch.

    Returns:
        DataLoader yielding aligned (data, label) batches.
    """
    with h5py.File(
            'E:\\ADDA\\adda-lab\\datasets\\lab\\test_target_domain_sandy.mat',
            'r') as f:
        # MATLAB v7.3 files store arrays transposed; restore the
        # original axis order before building tensors.
        t_data = f['data'][:].transpose(3, 2, 1, 0)
        t_label = np.transpose(f['label'][:])
    t_data = torch.from_numpy(t_data).float()
    t_label = torch.from_numpy(t_label).long()
    dataset = torch.utils.data.TensorDataset(t_data, t_label)
    return torch.utils.data.DataLoader(dataset,
                                       batch_size=batch_size,
                                       shuffle=True,
                                       drop_last=True,
                                       num_workers=1)