class Flatten(nn.Module):
    """Collapse every non-batch dimension of the input into one.

    The first dimension (``input.size(0)``, usually the batch size) is kept
    as-is; all remaining dimensions are merged, so the result has shape
    ``(batch_size, nb_elements)``.
    """

    def forward(self, input):
        # -1 lets view() infer the flattened length from the element count.
        return input.view(input.size(0), -1)
class View(nn.Module):
    """Reshape module whose target shape is supplied at call time."""

    def forward(self, input, shape):
        # NOTE(review): `shape` must spell out every dimension, including the
        # batch dimension. Because forward() takes two arguments, this module
        # cannot sit inside nn.Sequential, which forwards only the previous
        # layer's single output.
        return input.view(*shape)
class View(nn.Module):
    """Reshape module with the target shape fixed at construction time.

    Note that ``shape`` must include every dimension of the desired output,
    including the batch dimension.
    """

    def __init__(self, shape):
        # Initialize nn.Module first: skipping super().__init__() leaves the
        # module's internal registries (e.g. _forward_pre_hooks) unset, and
        # calling the module then raises AttributeError.
        super().__init__()
        self.shape = shape

    def forward(self, input):
        return input.view(*self.shape)
If you want to use the View inside an nn.Sequential, then yes, you have to do this, because Sequential only passes along the output of the previous layer.
Your Flatten layer, on the other hand, seems to work fine, no?
import torch
from torch import nn
class Flatten(nn.Module):
    """Flatten an input of shape (batch, ...) to (batch, nb_elements)."""

    def forward(self, input):
        # input.size(0) is usually the batch size; everything after it is
        # merged into a single dimension.
        batch = input.size(0)
        flattened = input.view(batch, -1)
        return flattened  # shape: (batch_size, product_of_remaining_dims)
# Exercise Flatten on inputs of increasing rank; the output is always
# (batch_size, product_of_the_remaining_dims).
for label, dims in [
    ("2D input", (10, 20)),
    ("3D input", (10, 20, 30)),
    ("8D input", (10, 2, 3, 4, 5, 6, 7, 8)),
]:
    print(label)
    sample = torch.rand(*dims)
    print("Input size:")
    print(sample.size())
    flattened = Flatten()(sample)
    print("Output size:")
    print(flattened.size())
class View(nn.Module):
    """Reshape (batch, ...) inputs to (batch, *shape), preserving the batch dim."""

    def __init__(self, shape):
        super().__init__()
        self.shape = shape

    def forward(self, input):
        """Reshape the input according to the shape stored at construction."""
        # The batch dimension is taken from the input and prepended, so the
        # stored shape only describes the per-sample dimensions.
        target = (input.size(0), *self.shape)
        return input.view(target)
File "/Users/pinocchio/anaconda3/lib/python3.7/site-packages/torch/nn/modules/module.py", line 488, in __call__
for hook in self._forward_pre_hooks.values():
File "/Users/pinocchio/anaconda3/lib/python3.7/site-packages/torch/nn/modules/module.py", line 539, in __getattr__
type(self).__name__, name))
AttributeError: 'View' object has no attribute '_forward_pre_hooks'
Make sure to properly call the parent class's __init__ (i.e. super().__init__()) when creating your own nn.Module.
Also make sure that you don't have any other class or function named View that could conflict with your new one.
class View(nn.Module):
    """Reshape (batch, ...) inputs to (batch, *shape), preserving the batch dim."""

    def __init__(self, shape):
        super().__init__()
        self.shape = shape

    def __repr__(self):
        # e.g. View(3, 4) for shape == (3, 4).
        return f'View{self.shape}'

    def forward(self, input):
        """Reshape the input according to the shape stored at construction."""
        # Prepend the batch dimension taken from the input itself; the stored
        # shape covers only the per-sample dimensions.
        return input.view((input.size(0), *self.shape))