Weight initialization

@Atcold, by checking isinstance(m, nn.Linear), the initialization would only be applied to linear modules, correct?
And if I call the weight initialization on the top-level model, will it be applied to all layers?
I have a residual module containing 2 linear layers, and my network stacks several of these modules.

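As far as I understand, Module.apply(fn) walks the module tree recursively and calls fn on every submodule, so the isinstance check is what limits the effect to the Linear layers. A minimal sketch of the mechanism (the layer sizes and the 0.02 std are made up for illustration):

import torch.nn as nn

def init_linear(m):	# same idea as weight_init below
	if isinstance(m, nn.Linear):
		m.weight.data.normal_(0.0, 0.02)	# toy std, just to show the call

model = nn.Sequential(nn.Linear(8, 16), nn.ReLU(), nn.Linear(16, 8))
model.apply(init_linear)	# visits the Sequential itself and both Linear children
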
my code:

import numpy as np
import torch
import torch.nn as nn

def weight_init(m):
	if isinstance(m, nn.Linear):
		size = m.weight.size()
		fan_out = size[0]	# number of rows
		fan_in = size[1]	# number of columns
		std = np.sqrt(2.0 / (fan_in + fan_out))	# Xavier/Glorot standard deviation (normal_ expects a std, not a variance)
		m.weight.data.normal_(0.0, std)

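As a side note, torch.nn.init already provides a Glorot/Xavier normal initializer that computes the same standard deviation, so I believe the body above could also be written as follows (the bias handling is just a suggestion):

def weight_init(m):
	if isinstance(m, nn.Linear):
		nn.init.xavier_normal_(m.weight)	# same std = sqrt(2 / (fan_in + fan_out))
		if m.bias is not None:
			nn.init.zeros_(m.bias)	# optional: start biases at zero
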

class Residual(nn.Module):
	def __init__(self, dropout, shape, negative_slope, BNflag=False):
		super(Residual, self).__init__()
		self.linear1 = nn.Linear(shape[0], shape[1])
		self.linear2 = nn.Linear(shape[1], shape[0])
		self.dropout = nn.Dropout(dropout)
		self.BNflag = BNflag
		# two separate BatchNorm layers: the features are shape[0] wide before
		# linear1 and shape[1] wide after it
		self.batch_normalization1 = nn.BatchNorm1d(shape[0])
		self.batch_normalization2 = nn.BatchNorm1d(shape[1])
		self.leakyRelu = nn.LeakyReLU(negative_slope=negative_slope, inplace=False)

	def forward(self, X):
		x = X
		if self.BNflag:
			x = self.batch_normalization1(x)
		x = self.leakyRelu(x)
		x = self.dropout(x)
		x = self.linear1(x)
		if self.BNflag:
			x = self.batch_normalization2(x)
		x = self.leakyRelu(x)
		x = self.dropout(x)
		x = self.linear2(x)
		x = torch.add(x, X)	# residual connection back to the block input
		return x
		
		
class FullyCN(nn.Module):
	def __init__(self, args):
		super(FullyCN, self).__init__()
		self.numlayers = args.sm_num_hidden_layers
		self.learning_rate = args.sm_learning_rate
		self.dropout = args.sm_dropout_prob
		self.BNflag = args.sm_bn
		self.shape = [args.sm_input_size, args.sm_num_hidden_units]
		self.res = Residual(self.dropout, self.shape, args.sm_act_param, self.BNflag)
		self.res.apply(weight_init)	# recursively initializes both Linear layers in the block
		self.res_outputs = []

	def forward(self, X):
		# the same Residual instance (shared weights) is reused numlayers + 1 times
		self.res_outputs = []	# reset each call so outputs do not accumulate across batches
		self.res_outputs.append(self.res(X))
		for i in range(self.numlayers):
			self.res_outputs.append(self.res(self.res_outputs[-1]))
		return self.res_outputs[-1]
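
If the concern is whether the initialization reaches every layer, calling apply on the whole model should also work. A quick sanity check (the args fields below are hypothetical, just mirroring the names used above):

from argparse import Namespace

args = Namespace(sm_num_hidden_layers=3, sm_learning_rate=1e-3, sm_dropout_prob=0.1,
		sm_bn=True, sm_input_size=64, sm_num_hidden_units=128, sm_act_param=0.01)

model = FullyCN(args)
model.apply(weight_init)	# recurses into FullyCN -> Residual -> both nn.Linear layers
for name, m in model.named_modules():
	if isinstance(m, nn.Linear):
		print(name, m.weight.std().item())	# should be close to sqrt(2 / (fan_in + fan_out))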