How do I print the output of QuantReLU3?

import torch
import torch.nn as nn
import brevitas.nn as qnn

class Network(nn.Module):
    def __init__(self):
        super(Network, self).__init__()
        self.conv1 = qnn.QuantConv2d(in_channels=3, out_channels=12, kernel_size=5, stride=1, weight_bit_width=3, padding=1)
        self.bn1 = nn.BatchNorm2d(12)
        self.quantReLU1 = qnn.QuantReLU(bit_width=3)
        self.conv2 = qnn.QuantConv2d(in_channels=12, out_channels=12, kernel_size=5, stride=1, weight_bit_width=3, padding=1)
        self.bn2 = nn.BatchNorm2d(12)
        self.pool = nn.MaxPool2d(2, 2)
        self.quantReLU2 = qnn.QuantReLU(bit_width=3)
        self.conv3 = qnn.QuantConv2d(in_channels=12, out_channels=24, kernel_size=5, stride=1, weight_bit_width=3, padding=1)
        self.bn3 = nn.BatchNorm2d(24)
        self.quantReLU3 = qnn.QuantReLU(bit_width=3)
        self.conv4 = qnn.QuantConv2d(in_channels=24, out_channels=24, kernel_size=5, stride=1, weight_bit_width=3, padding=1)
        self.bn4 = nn.BatchNorm2d(24)
        self.quantReLU4 = qnn.QuantReLU(bit_width=3)
        self.fc1 = nn.Linear(24 * 10 * 10, 10)

    def forward(self, input):
        output = self.quantReLU1(self.bn1(self.conv1(input)))
        output = self.quantReLU2(self.bn2(self.conv2(output)))
        output = self.pool(output)
        output = self.quantReLU3(self.bn3(self.conv3(output)))
        output = self.quantReLU4(self.bn4(self.conv4(output)))
        output = output.view(-1, 24 * 10 * 10)
        output = self.fc1(output)
        return output

@ptrblck, can you help me?

You can add a print statement directly after its usage:

output = self.quantReLU3(self.bn3(self.conv3(output)))  
print(output)
...

Thank you very much!

Another question, @ptrblck: I need to save the quantReLU3 output for every mini-batch. How can I do that?

You could return it from the forward method together with the last layer's output and save it.
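For example, a minimal sketch of that idea (the name act3 and the per-batch filename are my own choices, not part of the original code):

    def forward(self, input):
        output = self.quantReLU1(self.bn1(self.conv1(input)))
        output = self.quantReLU2(self.bn2(self.conv2(output)))
        output = self.pool(output)
        act3 = self.quantReLU3(self.bn3(self.conv3(output)))  # activation to save
        output = self.quantReLU4(self.bn4(self.conv4(act3)))
        output = output.view(-1, 24 * 10 * 10)
        output = self.fc1(output)
        return output, act3

    # then, inside the training loop (i is the mini-batch index):
    outputs, act3 = model(images)
    torch.save(act3.detach().cpu(), 'quantrelu3_batch{}.pt'.format(i))  # hypothetical filename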

Thank you very much!

@ptrblck,

Instead of fc3, how would I hook quantReLU3? And how would I insert it into the training loop?

activation = {}
def get_activation(name):
    def hook(model, input, output):
        activation[name] = output.detach()
    return hook

model.fc3.register_forward_hook(get_activation('fc3'))
output = model(x)
activation['fc3']

This is my training function. It simply loops over the data iterator, feeds the inputs to the network, and optimizes the parameters.

def train(num_epochs):
    best_accuracy = 0.0

    # Define the execution device
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    print("The model will be running on", device, "device")

    # Move the model's parameters and buffers to the CPU or GPU
    model.to(device)

    for epoch in range(num_epochs):  # loop over the dataset multiple times
        running_loss = 0.0
        running_acc = 0.0

        for i, (images, labels) in enumerate(train_loader, 0):
            # get the inputs (Variable is deprecated; tensors work directly)
            images = images.to(device)
            labels = labels.to(device)

            # zero the parameter gradients
            optimizer.zero_grad()

            # predict classes using images from the training set
            outputs = model(images)

            # compute the loss based on the model output and the real labels
            loss = loss_fn(outputs, labels)

            # backpropagate the loss
            loss.backward()

            # adjust parameters based on the calculated gradients
            optimizer.step()

            # print statistics every 1,000 mini-batches
            running_loss += loss.item()  # extract the loss value
            if i % 1000 == 999:
                print('[%d, %5d] loss: %.3f' %
                      (epoch + 1, i + 1, running_loss / 1000))
                # reset the running loss
                running_loss = 0.0

        # Compute and print the average accuracy for this epoch over all 10,000 test images
        accuracy = testAccuracy()
        print('For epoch', epoch + 1, 'the test accuracy over the whole test set is %d %%' % (accuracy))

        # save the model if the accuracy is the best seen so far
        if accuracy > best_accuracy:
            saveModel()
            best_accuracy = accuracy

If you want to add a forward hook to quantReLU3, use model.quantReLU3.register_forward_hook instead of model.fc3.
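For reference, a minimal sketch of how that could look combined with your training loop (the key name 'quantReLU3' and the per-batch filename are assumptions for illustration):

activation = {}
def get_activation(name):
    def hook(model, input, output):
        activation[name] = output.detach()
    return hook

# register the hook once, before training starts
model.quantReLU3.register_forward_hook(get_activation('quantReLU3'))

# then, inside the mini-batch loop, right after outputs = model(images):
torch.save(activation['quantReLU3'].cpu(),
           'quantrelu3_epoch{}_batch{}.pt'.format(epoch, i))  # hypothetical filenames

Registering the hook outside the loop avoids stacking a duplicate hook on every iteration.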