Why do the weights of the neurons change when the batch size changes? If I test new data with a batch size equal to the one I trained the network with, the results are good. If I change the batch size, the results are bad.
import torch
import torch.nn as nn
from torchvision.models.resnet import ResNet, BasicBlock

class MyResNet(ResNet):
    def __init__(self):
        # ResNet-18 layout ([2, 2, 2, 2] BasicBlocks) with 3 output classes
        super(MyResNet, self).__init__(BasicBlock, [2, 2, 2, 2], num_classes=3)
        # replace the stem conv so the network accepts single-channel input
        self.conv1 = torch.nn.Conv2d(1, 64,
                                     kernel_size=(7, 7),
                                     stride=(2, 2),
                                     padding=(3, 3), bias=False)
...
model.load_state_dict(torch.load('save.pth'))
criterion = nn.CrossEntropyLoss(reduction='sum')
optimizer = torch.optim.AdamW(model.parameters(), lr=learning_rate)
...
outputs = model(x)             # forward pass
loss1 = criterion(outputs, y)  # summed cross-entropy over the batch
optimizer.zero_grad()          # clear gradients from the previous step
loss1.backward()               # backpropagate
optimizer.step()               # update the weights
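Testing then looks roughly like this (a simplified sketch; test_loader and the batch size it was built with are placeholders for my actual data pipeline):

with torch.no_grad():
    for x, y in test_loader:       # test_loader created with a different batch_size
        outputs = model(x)         # same forward pass as during training
        predictions = outputs.argmax(dim=1)

Only when the test batch size matches the training batch size do these predictions look right.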