Understood! You can do this:
import torch, torchvision
import numpy as np
from torch import nn
from PIL import Image
model = torch.hub.load('pytorch/vision:v0.10.0', 'resnet18', pretrained=True)
n_inputs = 512
n_classes = 5
model.fc = torch.nn.Sequential(  # Suppose this is your sequential model
    nn.Linear(n_inputs, 256),    # fc[0]
    nn.ReLU(),                   # fc[1]
    nn.Dropout(0.4),             # fc[2]
    nn.Linear(256, 128),         # fc[3]
    nn.Linear(128, n_classes),   # fc[4]
)
# the above head ends with n_classes=5 outputs, which you don't want
# instead, you want it to end at the layer that outputs 128 features
# so replace the Sequential with one that keeps all the prior layers
# but drops the last one
model.fc = nn.Sequential(
    model.fc[0],
    model.fc[1],
    model.fc[2],
    model.fc[3],
)
# let's test that this indeed returns 128 'classes'
model.eval()  # switch off dropout so inference is deterministic
img = Image.new('RGB', (224, 224))  # blank dummy input image
input_tensor = torchvision.transforms.ToTensor()(img)  # convert to a tensor first
output = model(input_tensor.unsqueeze(0))  # add a batch dimension of size 1
output_to_save = output.detach().cpu().numpy()  # convert to numpy
print(output_to_save)
np.save("output.npy", output_to_save)  # save it like this
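# if you need the saved features back later, np.load reads them in again;
# a minimal sketch, reusing the output.npy path from above:
#   loaded = np.load("output.npy")
#   print(loaded.shape)  # (1, 128): one image, 128 features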
Output:
[[ 0.01438853 0.87196857 -0.2514455 -0.04714714 0.07090778 -0.09341301
-0.2072835 -0.3704936 -0.01389392 0.18359633 0.10641675 -0.11172692
0.6255953 -0.04955412 -0.05233074 -0.23172098 -0.64670634 0.01767935
-0.39153028 0.09038768 -0.4336232 0.5385093 0.09623256 0.05085237
0.141657 0.10092676 -0.35657144 -0.31972992 0.8487682 -0.10586535
0.02011631 0.34999335 -0.07444657 -0.06364524 0.68986833 -0.26194543
-0.10996548 -0.4478173 0.08224907 -0.22302929 0.08527138 0.34809947
0.31143022 0.4006711 0.06359468 0.2257615 0.42476267 0.30963692
0.12626037 -0.25697252 0.16672882 -0.06121482 -0.00894236 -0.13477468
-0.28371924 -0.5638889 0.23877671 -0.55630106 -0.19719738 0.15854624
0.07817743 -0.3680797 0.21859777 0.01053216 0.4597964 -0.37222108
-0.1515545 0.13200204 -0.34337598 -0.09385173 0.2707774 0.18420342
-0.14283875 0.3390723 -0.25316817 -0.22421657 -0.7452298 0.08496317
0.07830979 -0.06452103 -0.19457048 -0.10110219 0.19297805 0.35950035
-0.3854925 0.19778475 0.12232781 -0.04913102 0.40340108 0.6487622
0.03393503 0.31627747 0.4140402 -0.6647738 -0.45569345 -0.51362383
-0.8327177 0.0949322 -0.0045674 -0.23051375 -0.11645541 -0.45121124
-0.01439564 -0.0192363 -0.22718215 -0.00993027 0.09125008 -0.13285461
-0.20360185 0.47607702 0.11188114 0.1484635 0.18721539 -0.36961102
-0.09044638 -0.2350381 0.41334015 -0.2741512 -0.39748335 0.15688536
0.11036351 -0.60628456 0.3990275 -0.340592 0.5213363 -0.06208746
0.34663257 -0.08785937]] # 128 elements here!
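One more note: the blank test image above skips preprocessing on purpose. For real images, the pretrained ResNet expects the standard ImageNet preprocessing; here is a sketch of that transform (the resize/crop sizes and normalization constants are the usual ones for torchvision's ImageNet-pretrained models, and "your_image.jpg" is a placeholder path):
preprocess = torchvision.transforms.Compose([
    torchvision.transforms.Resize(256),
    torchvision.transforms.CenterCrop(224),
    torchvision.transforms.ToTensor(),
    torchvision.transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                     std=[0.229, 0.224, 0.225]),
])
real_img = Image.open("your_image.jpg").convert('RGB')  # placeholder path
features = model(preprocess(real_img).unsqueeze(0))     # shape: (1, 128)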
Also see this discussion, where a similar question is answered.
Hope this helps!