Same output for random inputs

Hello, I am trying to train a model. After training for a few epochs, I test the model on some data, but I always get the same output value.

input size: [N x 4 x 1000 x 1000 x 3]
output size: [51]

There is no problem with the DataLoader; it returns correct and different data at each step.
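For reference, this is roughly how I checked that (a minimal sketch; Human36Mn, optin.root_dir and optin.jumper come from my project, and the loader is built the same way as in the training code below):

import torch
from torch.utils.data import DataLoader

# Sanity check: print a simple per-batch statistic and flag consecutive
# batches that are exactly identical.
check_loader = DataLoader(dataset=Human36Mn(optin.root_dir, optin.jumper),
                          shuffle=False, batch_size=optin.batch_size)

prev_images = None
for step, (images, labels) in enumerate(check_loader):
    print('step %d, image mean %.6f' % (step, images.float().mean()))
    if prev_images is not None and torch.equal(images, prev_images):
        print('batch %d is identical to the previous one' % step)
    prev_images = images.clone()
    if step >= 4:
        break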

Model:

import torch
import torch.nn as nn
import torch.nn.functional as F


class Flatten(nn.Module):
    # Collapse everything except the batch dimension into a single vector.
    def forward(self, input):
        return input.view(input.size(0), -1)

class Third_Net(nn.Module):
    def __init__(self):
        super(Third_Net, self).__init__()
        # Input is treated as [N, C=4, D=1000, H=1000, W=3]; the (k, k, 1)
        # kernels convolve over the two 1000-sized dimensions only.
        self.conv1 = nn.Conv3d(4, 8, kernel_size=(3, 3, 1), stride=1)
        self.conv2 = nn.Conv3d(8, 16, kernel_size=(3, 3, 1), stride=1)
        self.conv3 = nn.Conv3d(16, 24, kernel_size=(5, 5, 1), stride=1)
        self.conv4 = nn.Conv3d(24, 24, kernel_size=(5, 5, 1), stride=1)
        self.conv5 = nn.Conv3d(24, 16, kernel_size=(3, 3, 1), stride=1)
        self.conv6 = nn.Conv3d(16, 8, kernel_size=(3, 3, 1), stride=1)
        self.conv7 = nn.Conv3d(8, 4, kernel_size=(3, 3, 1), stride=1)
        self.conv8 = nn.Conv3d(4, 1, kernel_size=(3, 3, 1), stride=1)
        self.mxpol = nn.MaxPool3d(kernel_size=(2, 2, 1))
        self.flatn = Flatten()
        self.dens1 = nn.Linear(10092, 2048)  # 1 * 58 * 58 * 3 = 10092 features after the last pooling stage
        self.dens3 = nn.Linear(2048, 51)

    def forward(self, x):
        out = F.relu(self.conv1(x))
        out = self.mxpol(F.relu(F.dropout(self.conv2(out))))
        out = F.relu(self.conv3(out))
        out = self.mxpol(F.dropout(F.relu(self.conv4(out))))
        out = F.relu(self.conv5(out))
        out = self.mxpol(F.dropout(F.relu(self.conv6(out))))
        out = F.relu(self.conv7(out))
        out = self.mxpol(F.dropout(F.relu(self.conv8(out))))
        out = self.flatn(out)
        out = F.relu(self.dens1(out))
        out = self.dens3(out)

        return out
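As a quick sanity check of the architecture (a minimal sketch on CPU with a freshly initialized network), a single input of size [1 x 4 x 1000 x 1000 x 3] should flatten to 1 * 58 * 58 * 3 = 10092 features and produce a [1 x 51] output:

from torch.autograd import Variable

# Smoke test: push one random input through an untrained Third_Net
# and confirm the output size is [1, 51].
net = Third_Net()
x = Variable(torch.rand(1, 4, 1000, 1000, 3))
out = net(x)
print(out.size())  # expected: torch.Size([1, 51])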

Training code:

from torch.utils.data import DataLoader
from torch.autograd import Variable
from tqdm import tqdm

criterion = nn.MSELoss(size_average=True).cuda(0)
optimizer = torch.optim.Adam(model.parameters(), lr=0.01)
model.train()

train_loader = DataLoader(dataset=Human36Mn(optin.root_dir, optin.jumper),
                          shuffle=False, batch_size=optin.batch_size)
for idx, (images, labels) in tqdm(enumerate(train_loader)):
    images = Variable(images.cuda()).float()
    labels = Variable(labels.cuda(async=True)).float()

    outputs = model(images)

    optimizer.zero_grad()
    loss = criterion(outputs, labels)
    loss.backward()
    optimizer.step()
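For completeness, the checkpoint that the testing code loads is saved after training roughly like this (the actual save code is not shown here, so this is only an assumption based on the 'state_dict' key used below):

# Assumed checkpoint format: a dict with a 'state_dict' entry,
# matching tar['state_dict'] in the testing code.
state = {'state_dict': model.state_dict()}
torch.save(state, '../checkpoint/normalize/ckpt_last_1_318.pth.tar')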

Testing:

import numpy as np

tar = torch.load('../checkpoint/normalize/ckpt_last_1_318.pth.tar')
model = Third_Net()
model = model.cpu()
model.load_state_dict(tar['state_dict'])

model.eval()
prev = np.random.rand(51)
for idx in range(20):
    a = torch.rand([1, 4, 1000, 1000, 3])
    inputt = Variable(a).cpu()  # .float()
    outputs = model(inputt)
    if np.array_equal(prev, np.array(outputs.data[0])):
        print idx, 'equal'
    prev = np.array(outputs.data[0])
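To rule out the possibility that the outputs merely look identical because of the exact float comparison, here is a small variation of the loop above that prints the maximum absolute difference between consecutive outputs instead:

# Same loop as above, but log how much two consecutive outputs actually differ.
prev = None
for idx in range(20):
    inputt = Variable(torch.rand([1, 4, 1000, 1000, 3]))
    outputs = model(inputt)
    cur = np.array(outputs.data[0])
    if prev is not None:
        print('step %d, max abs diff to previous output: %.8f'
              % (idx, np.abs(cur - prev).max()))
    prev = cur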

I am getting the same output for every random input. What might I be missing?

Thanks in advance to everyone.