I would like to change the number of bits used for weights and activations during testing. The network is pretrained and a .pth checkpoint is available, which stores 32-bit floating-point values as far as I understand. Let's say I have the following network, which is already trained:
class AlexNet(nn.Module):
    """Standard AlexNet classifier (torchvision layout).

    Five convolutional blocks with ReLU activations and three max-pool
    stages, an adaptive average pool to a fixed 6x6 spatial size, and a
    three-layer fully connected head with dropout.

    Args:
        num_classes: Number of output logits produced by the final
            ``nn.Linear`` layer (default 1000, matching ImageNet).
    """

    def __init__(self, num_classes=1000):
        super(AlexNet, self).__init__()
        # Convolutional feature extractor: 5 conv layers, 3 max-pools.
        self.features = nn.Sequential(
            nn.Conv2d(3, 64, kernel_size=11, stride=4, padding=2),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=3, stride=2),
            nn.Conv2d(64, 192, kernel_size=5, padding=2),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=3, stride=2),
            nn.Conv2d(192, 384, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(384, 256, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(256, 256, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=3, stride=2),
        )
        # Adaptive pooling makes the head independent of the exact input
        # resolution: the classifier always sees a 256x6x6 feature map.
        self.avgpool = nn.AdaptiveAvgPool2d((6, 6))
        self.classifier = nn.Sequential(
            nn.Dropout(),
            nn.Linear(256 * 6 * 6, 4096),
            nn.ReLU(inplace=True),
            nn.Dropout(),
            nn.Linear(4096, 4096),
            nn.ReLU(inplace=True),
            nn.Linear(4096, num_classes),
        )

    def forward(self, x):
        """Run a batch of images through the network.

        Args:
            x: Input tensor of shape ``(N, 3, H, W)``.

        Returns:
            Logits tensor of shape ``(N, num_classes)``.
        """
        x = self.features(x)
        x = self.avgpool(x)
        x = torch.flatten(x, 1)
        x = self.classifier(x)
        return x
# Instantiate the network and load pretrained FP32 weights.
# NOTE(review): `model_urls` and `load_state_dict_from_url` are not defined
# in this snippet — presumably imported from torchvision / torch.hub
# (`from torch.hub import load_state_dict_from_url`); confirm at the call site.
model = AlexNet()
# Downloads the checkpoint from the URL on first use (cached afterwards).
state_dict = load_state_dict_from_url(model_urls['alexnet'],
progress=True)
# Copies the downloaded tensors into the model's parameters in place;
# raises if any key in the checkpoint does not match the module names above.
model.load_state_dict(state_dict)
For Testing:
I want to quantize the activations of this network to 9-8-5-5-7 bits respectively — these correspond to the ReLUs in self.features, as I understand it — and the weights of all convolutional layers to 11 bits.
I also want to quantize the weights of the three fully connected layers to 10, 9, and 9 bits respectively.
Can someone please point me in the right direction or share a code snippet? Thanks in advance.