I want to implement a residual unit, and I set up some arguments in the `__init__` function. However, I found that the arguments were not actually assigned — they still hold their default values instead of the ones I passed — after the first call. Can anyone tell me where I should set up the additional arguments?
class SegNet(nn.Module):
    """Minimal network wrapping a single dilated residual unit."""

    def __init__(self):
        # BUG FIX: nn.Module.__init__ must run before any submodule is
        # assigned; without it, PyTorch raises "cannot assign module before
        # Module.__init__() call" (or silently breaks module registration,
        # which is why the constructor arguments appear to be "lost").
        super(SegNet, self).__init__()
        # isDilation=True selects the dilated-convolution branches in the unit.
        self.conv1 = residualUnit3(64, 64, isDilation=True)

    def forward(self, x):
        # BUG FIX: forward was declared as `forward(x)` with no `self`, so
        # calling model(x) would bind x to self and raise a TypeError.
        return self.conv1(x)
class residualUnit3(nn.Module):
    """Bottleneck-style residual unit: 1x1 conv -> BN -> ReLU -> 3x3 conv -> ...

    NOTE(review): the question shows only a fragment — the layers after conv2
    (and whatever `isEmptyBranch1` controls) are elided with `.........`, so
    the comments below cover only the visible code.
    """

    def __init__(self, in_size, out_size, isDilation=False, isEmptyBranch1=False, activation=F.relu):
        super(residualUnit3, self).__init__()
        # Bottleneck width. NOTE(review): plain `/` — this file uses a Python 2
        # print statement, so this is integer division here; under Python 3 it
        # would produce a float and break nn.Conv2d's out_channels.
        mid_size = out_size/2
        if isDilation:
            # NOTE(review): dilation has no effect on a 1x1 kernel, so both
            # branches of this `if` build an equivalent 1x1 conv.
            self.conv1 = nn.Conv2d(in_channels=in_size, out_channels=mid_size, kernel_size=1, stride=1, padding=0, dilation=2, bias=False)
        else:
            print 'line 90: isDilation',isDilation
            self.conv1 = nn.Conv2d(in_channels=in_size, out_channels=mid_size, kernel_size=1, stride=1, padding=0, bias=False)
        init.xavier_uniform(self.conv1.weight, gain=np.sqrt(2.0)) #or gain=1
        self.bn1 = nn.BatchNorm2d(mid_size)
        self.relu = nn.ReLU()
        if isDilation:
            # padding=2 with dilation=2 keeps spatial size for a 3x3 kernel.
            self.conv2 = nn.Conv2d(in_channels=mid_size, out_channels=mid_size, kernel_size=3, stride=1, padding=2, dilation=2, bias=False)
        else:
            self.conv2 = nn.Conv2d(in_channels=mid_size, out_channels=mid_size, kernel_size=3, stride=1, padding=1, bias=False)
        .........

    def forward(self, x):
        # Saved for the residual (skip) connection; the addition itself is
        # presumably in the elided lines below — TODO confirm.
        identity_data = x
        output = self.relu(self.bn1(self.conv1(x)))
        # NOTE(review): self.bn2 is used here but its definition must be in
        # the elided part of __init__ above — verify it exists there.
        output = self.relu(self.bn2(self.conv2(output)))
        ...........
        output = self.relu(output)
        return output