Why can’t I use dropout on a GRU layer? I keep getting the following error:
AttributeError Traceback (most recent call last)
<ipython-input-37-ffcdd5187a2c> in <module>()
----> 1 pred = net(X)
~/miniconda3/lib/python3.6/site-packages/torch/nn/modules/module.py in __call__(self, *input, **kwargs)
323 for hook in self._forward_pre_hooks.values():
324 hook(self, input)
--> 325 result = self.forward(*input, **kwargs)
326 for hook in self._forward_hooks.values():
327 hook_result = hook(self, input, result)
<ipython-input-32-82932b82b557> in forward(self, x)
10 def forward(self, x):
11 x = self.layer1(x)
---> 12 x = self.layer2(x)
13 x = self.fc(x)
14
~/miniconda3/lib/python3.6/site-packages/torch/nn/modules/module.py in __call__(self, *input, **kwargs)
323 for hook in self._forward_pre_hooks.values():
324 hook(self, input)
--> 325 result = self.forward(*input, **kwargs)
326 for hook in self._forward_hooks.values():
327 hook_result = hook(self, input, result)
~/miniconda3/lib/python3.6/site-packages/torch/nn/modules/dropout.py in forward(self, input)
44
45 def forward(self, input):
---> 46 return F.dropout(input, self.p, self.training, self.inplace)
47
48 def __repr__(self):
~/miniconda3/lib/python3.6/site-packages/torch/nn/functional.py in dropout(input, p, training, inplace)
524
525 def dropout(input, p=0.5, training=False, inplace=False):
--> 526 return _functions.dropout.Dropout.apply(input, p, training, inplace)
527
528
~/miniconda3/lib/python3.6/site-packages/torch/nn/_functions/dropout.py in forward(cls, ctx, input, p, train, inplace)
30 output = input
31 else:
---> 32 output = input.clone()
33
34 if ctx.p > 0 and ctx.train:
AttributeError: 'tuple' object has no attribute 'clone'
Here’s a simple example:
# nn.GRU.forward returns a *tuple* (output, h_n), not a tensor. nn.Sequential
# passes each module's return value directly to the next module, so nn.Dropout
# receives the tuple and fails with "'tuple' object has no attribute 'clone'".
# Fix: insert a small adapter module that unpacks the tuple and forwards only
# the output tensor to the rest of the stack.
class _SelectGRUOutput(nn.Module):
    """Unpack the (output, h_n) tuple returned by nn.GRU; keep only output."""

    def forward(self, gru_result):
        output, _h_n = gru_result  # discard the final hidden state
        return output


net = nn.Sequential(
    nn.GRU(10, 204),
    _SelectGRUOutput(),  # tensor of shape (seq_len, batch, 204) from here on
    nn.Dropout(0.5),
    nn.Linear(204, 10),
)
What am I doing wrong here?