Hi everyone
I am a beginner here and just ran into a problem. I have this model but I can not figure out why the loss function is not converging. the loss just fluctuates around the same value.
here is the code:
import torch
import torch.nn as nn
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
class ConvBlock(nn.Module):
def __init__(self,in_channel, out_channel,):
super().__init__()
self.conv_block = nn.Sequential(
nn.Conv2d(in_channel,out_channel,kernel_size=3,padding='same'),
nn.BatchNorm2d(out_channel),
nn.Conv2d(out_channel,out_channel,kernel_size=3,padding='same'),
nn.BatchNorm2d(out_channel),
nn.ReLU()
)
self.conv_block.cuda()
def forward(self,x):
return self.conv_block(x)
class FFNN(nn.Module):
def __init__(self,in_channel , out_channel=10,hidden_layers=[4096,256,64]) -> None:
super().__init__()
self.block = nn.Sequential()
self.in_channel = in_channel
self.out_channel = out_channel
self.hidden_layers = hidden_layers
for i,layer in enumerate(self.hidden_layers):
self.block.add_module(f'Linear_{i}',nn.Linear(self.in_channel,layer))
self.block.add_module(f'Relu_{i}',nn.ReLU())
self.in_channel = layer
self.block.add_module('Linear2',nn.Linear(self.hidden_layers[-1],self.out_channel))
self.block.cuda()
def forward(self,x):
return self.block(x)
class UnetEncoder(nn.Module):
def __init__(self,in_channel=1,features=[64,128,256,512], hidden_layers=None):
super().__init__()
self.pool = nn.AvgPool2d(kernel_size=(2,2),stride=2)
self.encoders = nn.ModuleList()
self.hidden_layers = hidden_layers
for feature in features:
self.encoders.append(ConvBlock(in_channel=in_channel,out_channel=feature))
in_channel = feature
def forward(self,x):
for encoder in self.encoders:
x = encoder(x)
x = self.pool(x)
x = x.reshape(x.size(0),-1)
if self.hidden_layers:
final = FFNN(in_channel=x.size(-1),hidden_layers=self.hidden_layers)
else:
final = FFNN(in_channel=x.size(-1))
return final(x)