Error about hooks when implementing a customized module

Hi, I just implemented a customized module as follows:

import numpy as np
import torch

class Loss(torch.nn.Module):
    '''
    Implement the loss function from the output of the RNN.
    Ref paper: https://arxiv.org/abs/1308.0850
    '''
    def __init__(self):
        '''
        x is a sequence of coordinates with dim (seq_len, batch, 3).
        Parameters are the sequence of outputs from the RNN with dim (seq_len, batch, 121).
        '''
        self.e = []    # predicted end-of-stroke probability scalar
        self.m1 = []   # vector of means for x1 with len 20
        self.m2 = []   # vector of means for x2 with len 20
        self.pi = []   # vector of mixture density network coefficients with len 20
        self.rho = []  # vector of correlations with len 20
        self.s1 = []   # vector of standard deviations for x1 with len 20
        self.s2 = []   # vector of standard deviations for x2 with len 20
        self.x1 = []   # x1 coordinate at t+1
        self.x2 = []   # x2 coordinate at t+1
        self.et = []   # end-of-stroke indicator from ground truth
        self.batch = 0       # batch size
        self.seq_length = 0  # reduced by 1 because loss is calculated at the t+1 timestamp
        self.parameters = []

    def forward(self, x, para):
        '''
        Implement eq 26 of ref paper for each batch.
        Input:
            para: dim(seq_len, batch, 121)
            x:    dim(seq_len, batch, 3)
        '''
        if x.size()[0] == para.size()[0]:
            self.seq_length = x.size()[0] - 1
            total_loss = 0
            for i in range(self.seq_length):
                # prepare parameters
                self.__get_para(i, x, para)
                normalpdf = self.__para2normal(self.x1, self.x2, self.m1, self.m2,
                                               self.s1, self.s2, self.rho)  # dim (n_batch, 20)
                single_loss = self.__singleLoss(normalpdf)
                total_loss += single_loss
            return total_loss
        else:
            raise Exception("x and para don't match")


    def __get_para(self, i, x, para):
        '''
        Slice and process parameters into the right form.
        Implementing eq 18-23 of ref paper.
        '''
        self.batch = x.size()[1]
        self.e = torch.sigmoid(-para[i, :, 0])  # eq 18
        self.parameters = para

        # slice remaining parameters and training inputs
        self.pi, self.m1, self.m2, self.s1, self.s2, self.rho = \
            torch.split(self.parameters[i, :, 1:], 20, dim=1)  # dim(batch, 20)
        self.x1 = x[i + 1, :, 0].contiguous().view(self.batch, 1)  # dim(batch, 1)
        self.x2 = x[i + 1, :, 1].contiguous().view(self.batch, 1)
        self.et = x[i + 1, :, 2].contiguous().view(self.batch, 1)

        ## process parameters
        # pi: softmax (eq 19), subtracting the max for numerical stability;
        # note the exp() -- dividing the shifted logits by their plain sum is not a softmax
        max_pi = torch.max(self.pi, dim=1)[0]
        max_pi = max_pi.expand_as(self.pi)
        exp_pi = torch.exp(self.pi - max_pi)
        red_sum = torch.sum(exp_pi, dim=1).expand_as(self.pi)
        self.pi = exp_pi.div(red_sum)

        # sd (eq 21)
        self.s1 = self.s1.exp()
        self.s2 = self.s2.exp()

        # rho (eq 22)
        self.rho = self.rho.tanh()

        # reshape ground truth x1, x2 to match m1, m2 because broadcasting
        # is currently not supported by pytorch
        self.x1 = self.x1.expand_as(self.m1)
        self.x2 = self.x2.expand_as(self.m2)


    def __para2normal(self, x1, x2, m1, m2, s1, s2, rho):
        '''
        Implement eq 24, 25 of ref paper.
        All inputs with dim(batch, 20).
        '''
        norm1 = x1.sub(m1)
        norm2 = x2.sub(m2)
        s1s2 = torch.mul(s1, s2)
        z = torch.pow(torch.div(norm1, s1), 2) + torch.pow(torch.div(norm2, s2), 2) - \
            2 * torch.div(torch.mul(rho, torch.mul(norm1, norm2)), s1s2)
        negRho = 1 - torch.pow(rho, 2)
        expPart = torch.exp(torch.div(-z, torch.mul(negRho, 2)))
        coef = 2 * np.pi * torch.mul(s1s2, torch.sqrt(negRho))
        result = torch.div(expPart, coef)
        return result

	
    def __singleLoss(self, normalpdf):
        '''
        Calculate loss for a single time stamp. eq 26
        Input: normalpdf dim(n_batch, 20).
        '''
        epsilon = 1e-20  # floor for the mixture density term, since the initial density could be zero
        mix_den = torch.sum(torch.mul(self.pi, normalpdf), dim=1)  # sum over the 20 mixture components first
        red_sum_loss = torch.sum(torch.log(mix_den + epsilon))     # then take the log and sum over the batch
        end_loss = torch.sum(torch.log(torch.mul(self.e, self.et) + torch.mul(1 - self.e, 1 - self.et)))
        total_loss = -red_sum_loss - end_loss

        return total_loss / self.batch
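
For reference, the equations the code above is trying to implement (as I read eqs. 24-26 of the paper) are the bivariate normal density

\[
\mathcal{N}(x \mid \mu, \sigma, \rho) = \frac{1}{2\pi\sigma_1\sigma_2\sqrt{1-\rho^2}} \exp\!\left(\frac{-Z}{2(1-\rho^2)}\right), \qquad
Z = \frac{(x_1-\mu_1)^2}{\sigma_1^2} + \frac{(x_2-\mu_2)^2}{\sigma_2^2} - \frac{2\rho(x_1-\mu_1)(x_2-\mu_2)}{\sigma_1\sigma_2}
\]

and the per-timestep loss, here written in the Bernoulli form the code uses for the end-of-stroke term:

\[
\mathcal{L} = -\sum_t \log\Big(\sum_j \pi_j \, \mathcal{N}(x_{t+1} \mid \mu_j, \sigma_j, \rho_j)\Big) - \sum_t \log\big(e_t\, x_{t+1,3} + (1-e_t)(1-x_{t+1,3})\big)
\]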

When I call loss(x, para), the following error comes up:

    'Loss' object has no attribute '_forward_hooks'

What does the error message imply?
Specifically, what are hooks used for?
Did I get the error because I have no parameters in this module?

Hi,

Firstly, let me say it would be nice if you could format all of your code, including the __init__ method.
Also, it would be helpful if you could provide a working example of how to replicate the error (the code doesn't run).

Nevertheless, your error is caused by the fact that you don't call the Module class's __init__. Your code should be:

class Loss(torch.nn.Module):
    def __init__(self):
        super(Loss, self).__init__()

        ... # rest of the __init__ here
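
To see why: nn.Module's __init__ is what creates internal bookkeeping attributes such as _forward_hooks, _backward_hooks, _parameters and _modules, and Module's __call__ looks them up whenever you call loss(x, para). So the error has nothing to do with your module having no parameters. A minimal sketch that reproduces it (the exact message can vary between PyTorch versions):

import torch

class Bad(torch.nn.Module):
    def __init__(self):
        pass  # note: torch.nn.Module.__init__ is never called here

    def forward(self, x):
        return x

Bad()(torch.ones(1))
# AttributeError: 'Bad' object has no attribute '_forward_hooks'

Adding the super call as above makes the same snippet run fine.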
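As for what hooks are used for: they let you register callbacks that run automatically on every forward (or backward) pass, e.g. to inspect or log intermediate outputs, without editing the module itself. A small illustrative sketch using the public register_forward_hook API (which stores the callback in the very _forward_hooks dict from your error message):

import torch

class Net(torch.nn.Module):
    def __init__(self):
        super(Net, self).__init__()  # sets up _forward_hooks, among other things
        self.fc = torch.nn.Linear(3, 2)

    def forward(self, x):
        return self.fc(x)

net = Net()

# a forward hook receives (module, input, output)
def print_shape(module, input, output):
    print(module.__class__.__name__, 'output size:', output.size())

handle = net.register_forward_hook(print_shape)
net(torch.randn(4, 3))  # prints: Net output size: torch.Size([4, 2])
handle.remove()  # detach the hook when it's no longer needed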