__init__() missing 1 required positional argument: 'num_features'

When I try to run the ResNet architecture I get a TypeError saying the num_features argument is missing, and I couldn't figure out where the problem is.

import torch
import torch.nn as nn
from numpy.random import normal
from numpy.linalg import svd
from math import sqrt
import torch.nn.init
from .common import *

class ResidualSequential(nn.Sequential):
    def __init__(self, *args):
        super(ResidualSequential, self).__init__(*args)

    def forward(self, x):
        out = super(ResidualSequential, self).forward(x)
        # print(x.size(), out.size())
        x_ = None
        if out.size(2) != x.size(2) or out.size(3) != x.size(3):
            diff2 = x.size(2) - out.size(2)
            diff3 = x.size(3) - out.size(3)
            # print(1)
            x_ = x[:, :, diff2 // 2:out.size(2) + diff2 // 2, diff3 // 2:out.size(3) + diff3 // 2]
        else:
            x_ = x
        return out + x_

    def eval(self):
        print(2)
        for m in self.modules():
            m.eval()
        exit()


def get_block(num_channels, norm_layer, act_fun):
    layers = [
        nn.Conv2d(num_channels, num_channels, 3, 1, 1, bias=False),
        norm_layer(num_channels, affine=True),
        act(act_fun),
        nn.Conv2d(num_channels, num_channels, 3, 1, 1, bias=False),
        norm_layer(num_channels, affine=True),
    ]
    return layers


class ResNet(nn.Module):
    def __init__(self, num_input_channels, num_output_channels, num_blocks, num_channels, need_residual=True, act_fun='LeakyReLU', need_sigmoid=True, norm_layer=nn.BatchNorm2d, pad='reflection'):
        '''
            pad = 'start|zero|replication'
        '''
        super(ResNet, self).__init__()

        if need_residual:
            s = ResidualSequential
        else:
            s = nn.Sequential

        stride = 1
        # First layers
        layers = [
            # nn.ReplicationPad2d(num_blocks * 2 * stride + 3),
            conv(num_input_channels, num_channels, 3, stride=1, bias=True, pad=pad),
            act(act_fun)
        ]
        # Residual blocks
        # layers_residual = []
        for i in range(num_blocks):
            layers += [s(*get_block(num_channels, norm_layer, act_fun))]
       
        layers += [
            nn.Conv2d(num_channels, num_channels, 3, 1, 1),
            norm_layer(num_channels, affine=True)
        ]

        # if need_residual:
        #     layers += [ResidualSequential(*layers_residual)]
        # else:
        #     layers += [Sequential(*layers_residual)]

        # if factor >= 2: 
        #     # Do upsampling if needed
        #     layers += [
        #         nn.Conv2d(num_channels, num_channels *
        #                   factor ** 2, 3, 1),
        #         nn.PixelShuffle(factor),
        #         act(act_fun)
        #     ]
        layers += [
            conv(num_channels, num_output_channels, 3, 1, bias=True, pad=pad),
            nn.Sigmoid()
        ]
        self.model = nn.Sequential(*layers)

    def forward(self, input):
        return self.model(input)

    def eval(self):
        self.model.eval()
This is how the network is created in the notebook:

input_depth = 32
figsize = 4

net = get_net(input_depth, 'ResNet', pad,
              num_scales=5,
              upsample_mode='bilinear').type(dtype)

Error:

---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-7-9bb0b8e81004> in <module>()
     18     net = get_net(input_depth, 'ResNet', pad, 
     19                   num_scales=5,
---> 20                   upsample_mode='bilinear').type(dtype)
     21 
     22 else:

2 frames
/content/models/__init__.py in get_net(input_depth, NET_TYPE, pad, upsample_mode, n_channels, act_fun, skip_n33d, skip_n33u, skip_n11, num_scales, downsample_mode)
      9     if NET_TYPE == 'ResNet':
     10         # TODO
---> 11         net = ResNet(input_depth, 3, 10, 16, 1, nn.BatchNorm2d, False)
     12     elif NET_TYPE == 'skip':
     13         net = skip(input_depth, n_channels, num_channels_down = [skip_n33d]*num_scales if isinstance(skip_n33d, int) else skip_n33d,

/content/models/resnet.py in __init__(self, num_input_channels, num_output_channels, num_blocks, num_channels, need_residual, act_fun, need_sigmoid, norm_layer, pad)
     59             # nn.ReplicationPad2d(num_blocks * 2 * stride + 3),
     60             conv(num_input_channels, num_channels, 3, stride=1, bias=True, pad=pad),
---> 61             act(act_fun)
     62         ]
     63         # Residual blocks

/content/models/common.py in act(act_fun)
     90             assert False
     91     else:
---> 92         return act_fun()
     93 
     94 

TypeError: __init__() missing 1 required positional argument: 'num_features'

The issue seems to be raised by act(act_fun), which seems to be a custom function defined in .common, so you might want to check its implementation, as it's not shared here.
If you get stuck, please post a minimal, executable code snippet that reproduces the issue.
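From the traceback alone, one plausible explanation (a sketch, assuming common.py really falls through to return act_fun() on its line 92, and going by the call ResNet(input_depth, 3, 10, 16, 1, nn.BatchNorm2d, False) shown in get_net): nn.BatchNorm2d ends up bound to the act_fun parameter, the sixth positional argument of ResNet.__init__, so act(act_fun) calls it with no arguments:

import torch.nn as nn

# Hypothetical reproduction of the error, based only on the traceback above:
# nn.BatchNorm2d lands in the act_fun slot and act(act_fun) calls it bare.
act_fun = nn.BatchNorm2d   # what the 6th positional argument resolves to
layer = act_fun()          # TypeError: __init__() missing 1 required
                           # positional argument: 'num_features'

If that matches your .common code, passing the ResNet arguments by keyword (need_residual=..., act_fun=..., need_sigmoid=..., norm_layer=...) instead of positionally, or passing a proper activation such as 'LeakyReLU' as act_fun, should make the error go away.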