Stochastic Depth TypeError: forward() takes 2 positional arguments but 4 were given

Hi,

I'm implementing Stochastic Depth on ResNet, but I'm getting the TypeError above in forward(). Is there a way to fix this?
Thank you in advance!
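
In case it helps narrow things down: I suspect the problem is that layer1/layer2/layer3 are built with nn.Sequential, and Sequential.forward() only accepts a single input, so the extra active/prob arguments can't be passed through. A tiny standalone snippet (just for illustration, unrelated to my model) reproduces the same message:

import torch
import torch.nn as nn

seq = nn.Sequential(nn.ReLU())
x = torch.randn(1, 3)
seq(x, 1, 0.5)  # TypeError: forward() takes 2 positional arguments but 4 were given

Here is my full code: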

"""
Resnet for cifar dataset.
Ported from
https://github.com/facebookarchive/fb.resnet.torch
and
https://github.com/pytorch/vision/blob/master/torchvision/models/resnet.py
"""
import torch
import torch.nn as nn
import math

__all__ = ['resnet', 'sto_depth_resnet']

def conv3x3(in_planes, out_planes, stride=1):
    "3x3 convolution with padding"
    return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False)

class BasicBlock(nn.Module):
    expansion = 1

    def __init__(self, inplanes, planes, stride=1, downsample=None):
        super(BasicBlock, self).__init__()
        self.conv1 = conv3x3(inplanes, planes, stride)
        self.bn1 = nn.BatchNorm2d(planes)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = conv3x3(planes, planes)
        self.bn2 = nn.BatchNorm2d(planes)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        residual = x

        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)

        out = self.conv2(out)
        out = self.bn2(out)

        if self.downsample is not None:
            residual = self.downsample(x)

        out += residual
        out = self.relu(out)

        return out

class Bottleneck(nn.Module):
    expansion = 4

    def __init__(self, inplanes, planes, stride=1, downsample=None):
        super(Bottleneck, self).__init__()
        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
        self.bn3 = nn.BatchNorm2d(planes * 4)
        self.relu = nn.ReLU(inplace=True)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        residual = x

        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)

        out = self.conv2(out)
        out = self.bn2(out)
        out = self.relu(out)

        out = self.conv3(out)
        out = self.bn3(out)

        if self.downsample is not None:
            residual = self.downsample(x)

        out += residual
        out = self.relu(out)

        return out

class BasicBlock_stochastic_depth(nn.Module):
    expansion = 1

    def __init__(self, inplanes, planes, stride=1, downsample=None):
        super(BasicBlock_stochastic_depth, self).__init__()
        self.conv1 = conv3x3(inplanes, planes, stride)
        self.bn1 = nn.BatchNorm2d(planes)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = conv3x3(planes, planes)
        self.bn2 = nn.BatchNorm2d(planes)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x, active, prob):
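        # Training: the residual branch runs only when this block is sampled as
        # active; when inactive, only the shortcut (downsample, if any, plus the
        # ReLU) is applied. Eval: the branch always runs, scaled by its survival
        # probability prob.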
        if self.training:
            if active == 1:
                print('active')
                residual = x
                out = self.conv1(x)
                out = self.bn1(out)
                out = self.relu(out)

                out = self.conv2(out)
                out = self.bn2(out)

                if self.downsample is not None:
                    residual = self.downsample(x)

                out += residual
                out = self.relu(out)
            else:
                print('inactive')
                out = x
                if self.downsample is not None:
                    out = self.downsample(out)
                out = self.relu(out)

        else:
            residual = x

            out = self.conv1(x)
            out = self.bn1(out)
            out = self.relu(out)

            out = self.conv2(out)
            out = self.bn2(out)

            if self.downsample is not None:
                residual = self.downsample(x)

            out = prob * out + residual
            out = self.relu(out)

        return out

class Bottleneck_stochastic_depth(nn.Module):
    expansion = 4

    def __init__(self, inplanes, planes, stride=1, downsample=None):
        super(Bottleneck_stochastic_depth, self).__init__()
        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
        self.bn3 = nn.BatchNorm2d(planes * 4)
        self.relu = nn.ReLU(inplace=True)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x, active, prob):
        if self.training:
            if active == 1:
                print('active')
                residual = x
                out = self.conv1(x)
                out = self.bn1(out)
                out = self.relu(out)

                out = self.conv2(out)
                out = self.bn2(out)
                out = self.relu(out)

                out = self.conv3(out)
                out = self.bn3(out)

                if self.downsample is not None:
                    residual = self.downsample(x)

                out += residual
                out = self.relu(out)
            else:
                print('inactive')
                out = x
                if self.downsample is not None:
                    out = self.downsample(out)
                out = self.relu(out)
        else:
            residual = x

            out = self.conv1(x)
            out = self.bn1(out)
            out = self.relu(out)

            out = self.conv2(out)
            out = self.bn2(out)
            out = self.relu(out)

            out = self.conv3(out)
            out = self.bn3(out)

            if self.downsample is not None:
                residual = self.downsample(x)

            out = prob * out + residual
            out = self.relu(out)

        return out

class ResNet(nn.Module):

    def __init__(self, depth, num_classes=1000, block_name='BasicBlock'):
        super(ResNet, self).__init__()
        # Model type specifies number of layers for CIFAR-10 model
        if block_name.lower() == 'basicblock':
            assert (depth - 2) % 6 == 0, 'When use basicblock, depth should be 6n+2, e.g. 20, 32, 44, 56, 110, 1202'
            n = (depth - 2) // 6
            block = BasicBlock
        elif block_name.lower() == 'bottleneck':
            assert (depth - 2) % 9 == 0, 'When use bottleneck, depth should be 9n + 2, e.g. 20, 29, 47, 56, 110, 1199'
            n = (depth - 2) // 9
            block = Bottleneck
        else:
            raise ValueError('block_name should be Basicblock or Bottleneck')

        self.inplanes = 16
        self.conv1 = nn.Conv2d(3, 16, kernel_size=3, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(16)
        self.relu = nn.ReLU(inplace=True)
        self.layer1 = self._make_layer(block, 16, n)
        self.layer2 = self._make_layer(block, 32, n, stride=2)
        self.layer3 = self._make_layer(block, 64, n, stride=2)
        self.avgpool = nn.AvgPool2d(8)
        self.fc = nn.Linear(64 * block.expansion, num_classes)

        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2./n))
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()

    def _make_layer(self, block, planes, blocks, stride=1):
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                nn.Conv2d(self.inplanes, planes * block.expansion,
                          kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(planes * block.expansion)
            )

        layers = []
        layers.append(block(self.inplanes, planes, stride, downsample))
        self.inplanes = planes * block.expansion
        for i in range(1, blocks):
            layers.append(block(self.inplanes, planes))

        return nn.Sequential(*layers)

    def forward(self, x):
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)

        x = self.layer1(x)
        x = self.layer2(x)
        x = self.layer3(x)

        x = self.avgpool(x)
        x = x.view(x.size(0), -1)
        x = self.fc(x)

        return x

class ResNetSD(nn.Module):

    def __init__(self, depth, num_classes=1000, block_name='BasicBlock'):
        super(ResNetSD, self).__init__()
        # Model type specifies number of layers for CIFAR-10 model
        if block_name.lower() == 'basicblock':
            assert (depth - 2) % 6 == 0, 'When use basicblock, depth should be 6n+2, e.g. 20, 32, 44, 56, 110, 1202'
            n = (depth - 2) // 6
            block = BasicBlock_stochastic_depth
        elif block_name.lower() == 'bottleneck':
            assert (depth - 2) % 9 == 0, 'When use bottleneck, depth should be 9n + 2, e.g. 20, 29, 47, 56, 110, 1199'
            n = (depth - 2) // 9
            block = Bottleneck_stochastic_depth
        else:
            raise ValueError('block_name should be Basicblock or Bottleneck')

        self.probabilities = torch.tensor(0.5)
        self.actives = torch.bernoulli(self.probabilities)
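        # Note: a single gate is sampled here, once at construction time, so the
        # same active/prob pair ends up shared by every block and every forward pass.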

        self.inplanes = 16
        self.conv1 = nn.Conv2d(3, 16, kernel_size=3, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(16)
        self.relu = nn.ReLU(inplace=True)
        self.layer1 = self._make_layer(block, 16, n)
        self.layer2 = self._make_layer(block, 32, n, stride=2)
        self.layer3 = self._make_layer(block, 64, n, stride=2)
        self.avgpool = nn.AvgPool2d(8)
        self.fc = nn.Linear(64 * block.expansion, num_classes)

        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2./n))
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()

    def _make_layer(self, block, planes, blocks, stride=1):
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            downsample = nn.Sequential(
                nn.Conv2d(self.inplanes, planes * block.expansion,
                          kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(planes * block.expansion)
            )

        layers = []
        layers.append(block(self.inplanes, planes, stride, downsample))
        self.inplanes = planes * block.expansion
        for i in range(1, blocks):
            layers.append(block(self.inplanes, planes))

        return nn.Sequential(*layers)

    def forward(self, x):
        x = self.conv1(x)
        x = self.bn1(x)
        x = self.relu(x)

        x = self.layer1(x, self.actives, self.probabilities)
        x = self.layer2(x, self.actives, self.probabilities)
        x = self.layer3(x, self.actives, self.probabilities)
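        # The TypeError is raised by the three calls above: layer1/2/3 are
        # nn.Sequential, and Sequential.forward() accepts only one input, so the
        # extra (actives, probabilities) arguments are rejected.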

        x = self.avgpool(x)
        x = x.view(x.size(0), -1)
        x = self.fc(x)

        return x

def resnet(**kwargs):
    """
    Constructs a ResNet model.
    :param kwargs:
    :return:
    """
    return ResNet(**kwargs)

def sto_depth_resnet(**kwargs):
    return ResNetSD(**kwargs)

if __name__ == "__main__":
    import timeit

    batch_size = 512
    model = sto_depth_resnet(depth=20, num_classes=1000)

    data = torch.rand(batch_size, 3, 32, 32).cuda()
    print('data.device = ', data.device)

    model = torch.nn.DataParallel(model).cuda()
    for m in model.modules():
        print(m)

    start = timeit.default_timer()
    out = model(data)
    end = timeit.default_timer()

    print('Done.')
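
One workaround I have been considering, though I am not sure it is the intended way: keep the blocks of each stage in an nn.ModuleList instead of an nn.Sequential, and have a small wrapper call every block explicitly, sampling a fresh gate per block during training. The wrapper below is only my own rough sketch (the class name and the per-block sampling are my assumptions, not from any library):

import torch
import torch.nn as nn

class StochasticDepthStage(nn.Module):
    # Sketch: holds the blocks of one stage and forwards (x, active, prob)
    # to each of them, so the block classes above could be reused unchanged.
    def __init__(self, blocks, prob=0.5):
        super(StochasticDepthStage, self).__init__()
        self.blocks = nn.ModuleList(blocks)
        self.prob = prob  # survival probability shared by the blocks in this stage

    def forward(self, x):
        for block in self.blocks:
            if self.training:
                # sample a fresh Bernoulli gate for every block on every pass
                active = torch.bernoulli(torch.tensor(self.prob)).item()
            else:
                active = 1  # blocks are never dropped at eval time
            x = block(x, active, self.prob)
        return x

With that, _make_layer would return StochasticDepthStage(layers) instead of nn.Sequential(*layers), and the calls in ResNetSD.forward would go back to plain x = self.layer1(x). Would that be a reasonable way to do it, or is there a cleaner approach?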