class Network(nn.Module):
    """Siamese-style network: the same shared weights process both image stacks.

    NOTE(review): the layer definitions, forward pass, and weight
    initialisation were omitted from the original post, so they are left as
    explicit stubs here.
    """

    def __init__(self):
        super().__init__()
        # ... layer definitions omitted in the original post ...

    def forward(self, img):
        # ... forward pass omitted in the original post ...
        raise NotImplementedError

    def init_weights(self):
        # ... weight initialisation omitted in the original post ...
        pass

    def train_step(self, batch):
        """Compute the combined loss for one batch of paired image stacks.

        `batch` is assumed to be a 4-tuple
        (img_stack1, labels1, img_stack2, labels2) — TODO confirm against
        the DataLoader / __getitem__.
        """
        # BUG FIX: the original unpacked the name `image_stack2` but then
        # used `img_stack2`, which would raise NameError.
        img_stack1, labels1, img_stack2, labels2 = batch
        out1 = self(img_stack1)  # shared parameters: same module for both inputs
        out2 = self(img_stack2)
        # BUG FIX: the original lines had an unbalanced extra `)` each.
        # BUG FIX: the original assigned `loss = loss1 + loss2`, which makes
        # `loss` a *local* variable for the whole method — so the calls to the
        # criterion `loss(...)` below raised UnboundLocalError. The total is
        # now stored under a different name so the criterion is not shadowed.
        # NOTE(review): `loss` is the criterion defined elsewhere in the file.
        loss1 = loss(out1[:, 0:3].unsqueeze(1), labels1)
        loss2 = loss(out2[:, 0:3].unsqueeze(1), labels2)
        total_loss = loss1 + loss2
        return total_loss
def train(epochs, model, train_loader, optimizer=None):
    """Run the training loop for `epochs` epochs.

    Parameters
    ----------
    epochs : int
        Number of passes over `train_loader`.
    model : nn.Module
        Must provide `init_weights()` and `train_step(batch)` (the latter
        returning a scalar loss tensor).
    train_loader : iterable
        Yields batches consumed by `model.train_step`.
    optimizer : torch.optim.Optimizer, optional
        Defaults to Adam with lr=0.001 over `model.parameters()`, matching
        the original code's intent.

    Returns
    -------
    list[float]
        Average loss per epoch.
    """
    import torch  # local import so this block stays self-contained

    # BUG FIX: the original signature `optimizer = .....` was invalid, and the
    # body then unconditionally overwrote any optimizer passed in.
    if optimizer is None:
        optimizer = torch.optim.Adam(model.parameters(), lr=0.001)
    model.init_weights()
    history = []
    for epoch in range(1, epochs + 1):
        loss_accumulator = 0.0
        num_batches = 0
        for batch in train_loader:
            optimizer.zero_grad()
            # BUG FIX: the original called `model.training_step(batch)`, but
            # the class defines `train_step` — that was an AttributeError.
            loss = model.train_step(batch)
            loss.backward()
            # BUG FIX: the original never called optimizer.step(), so the
            # gradients were computed but the weights were NEVER updated —
            # which is exactly why the loss stayed stuck at ~11.0.
            optimizer.step()
            # BUG FIX: loss_accumulator was initialised but never accumulated.
            loss_accumulator += loss.item()
            num_batches += 1
        history.append(loss_accumulator / max(num_batches, 1))
    return history
My code is shown above; I am using shared network parameters (the same model processes both image stacks). Whatever I do, the loss stays at a value of about 11.0. My `__getitem__()` and train loader are working fine. Can someone help me figure out what I am doing wrong? Thank you.