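"ResNet image classification models: the standard BasicBlock/Bottleneck architectures plus experimental variants (final BN after the residual add, zero-initialized final BN, widened layers, and a VGG-style head)."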
import torch.nn as nn
import math
import torch.utils.model_zoo as model_zoo

from ..layers import *

model_urls = {
    'resnet18':  'https://download.pytorch.org/models/resnet18-5c106cde.pth',
    'resnet34':  'https://download.pytorch.org/models/resnet34-333f7ec4.pth',
    'resnet50':  'https://download.pytorch.org/models/resnet50-19c8e357.pth',
    'resnet101': 'https://download.pytorch.org/models/resnet101-5d3b4d8f.pth',
    'resnet152': 'https://download.pytorch.org/models/resnet152-b121ed2d.pth',
}
def conv3x3(in_planes, out_planes, stride=1):
    "3x3 convolution with padding"
    return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
                     padding=1, bias=False)

def bn1(planes):
    "BatchNorm1d layer with weight initialized to 1 and bias to 0."
    m = nn.BatchNorm1d(planes)
    m.weight.data.fill_(1)
    m.bias.data.zero_()
    return m

def bn(planes, init_zero=False):
    "BatchNorm2d layer; weight is initialized to 0 when `init_zero` is True, else to 1."
    m = nn.BatchNorm2d(planes)
    m.weight.data.fill_(0 if init_zero else 1)
    m.bias.data.zero_()
    return m
class BasicBlock(nn.Module):
    "Two-conv residual block. In this variant ReLU precedes BatchNorm, and the second BN is applied after the residual addition."
    expansion = 1

    def __init__(self, inplanes, planes, stride=1, downsample=None):
        super().__init__()
        self.conv1 = conv3x3(inplanes, planes, stride)
        self.bn1 = bn(planes)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = conv3x3(planes, planes)
        self.bn2 = bn(planes)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        residual = x
        if self.downsample is not None: residual = self.downsample(x)
        out = self.conv1(x)
        out = self.relu(out)
        out = self.bn1(out)
        out = self.conv2(out)
        out += residual
        out = self.relu(out)
        out = self.bn2(out)
        return out
class BottleneckFinal(nn.Module):
    "Bottleneck variant that adds the residual before the final BatchNorm instead of after it."
    expansion = 4

    def __init__(self, inplanes, planes, stride=1, downsample=None):
        super().__init__()
        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
        self.bn1 = bn(planes)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride,
                               padding=1, bias=False)
        self.bn2 = bn(planes)
        self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
        self.bn3 = bn(planes * 4)
        self.relu = nn.ReLU(inplace=True)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        residual = x
        if self.downsample is not None: residual = self.downsample(x)
        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)
        out = self.conv2(out)
        out = self.bn2(out)
        out = self.relu(out)
        out = self.conv3(out)
        out += residual            # shortcut added before the final BN
        out = self.bn3(out)
        out = self.relu(out)
        return out
class BottleneckZero(nn.Module):
    "Bottleneck variant whose final BatchNorm weight is zero-initialized, so the residual branch starts near zero and the block begins close to an identity mapping."
    expansion = 4

    def __init__(self, inplanes, planes, stride=1, downsample=None):
        super().__init__()
        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
        self.bn1 = bn(planes)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride,
                               padding=1, bias=False)
        self.bn2 = bn(planes)
        self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
        self.bn3 = bn(planes * 4, init_zero=True)
        self.relu = nn.ReLU(inplace=True)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        residual = x
        if self.downsample is not None: residual = self.downsample(x)
        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)
        out = self.conv2(out)
        out = self.bn2(out)
        out = self.relu(out)
        out = self.conv3(out)
        out = self.bn3(out)
        out += residual
        out = self.relu(out)
        return out
class Bottleneck(nn.Module):
    "Standard bottleneck residual block: 1x1 reduce, 3x3, then 1x1 expand by `expansion`."
    expansion = 4

    def __init__(self, inplanes, planes, stride=1, downsample=None):
        super().__init__()
        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
        self.bn1 = bn(planes)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride,
                               padding=1, bias=False)
        self.bn2 = bn(planes)
        self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False)
        self.bn3 = bn(planes * 4)
        self.relu = nn.ReLU(inplace=True)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        residual = x
        if self.downsample is not None: residual = self.downsample(x)
        out = self.conv1(x)
        out = self.bn1(out)
        out = self.relu(out)
        out = self.conv2(out)
        out = self.bn2(out)
        out = self.relu(out)
        out = self.conv3(out)
        out = self.bn3(out)
        out += residual
        out = self.relu(out)
        return out
class ResNet(nn.Module):
    "ResNet built as a single `nn.Sequential`; `k` is a width multiplier and `vgg_head` swaps the pooled linear classifier for a VGG-style fully connected head."

    def __init__(self, block, layers, num_classes=1000, k=1, vgg_head=False):
        super().__init__()
        self.inplanes = 64
        features = [nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, bias=False),
                    bn(64), nn.ReLU(inplace=True),
                    nn.MaxPool2d(kernel_size=3, stride=2, padding=1),
                    self._make_layer(block, int(64*k),  layers[0]),
                    self._make_layer(block, int(128*k), layers[1], stride=2),
                    self._make_layer(block, int(256*k), layers[2], stride=2),
                    self._make_layer(block, int(512*k), layers[3], stride=2)]
        out_sz = int(512*k) * block.expansion
        if vgg_head:
            features += [nn.AdaptiveAvgPool2d(3), Flatten(),
                         nn.Linear(out_sz*3*3, 4096), nn.ReLU(inplace=True), bn1(4096), nn.Dropout(0.25),
                         nn.Linear(4096, 4096), nn.ReLU(inplace=True), bn1(4096), nn.Dropout(0.25),
                         nn.Linear(4096, num_classes)]
        else:
            features += [nn.AdaptiveAvgPool2d(1), Flatten(), nn.Linear(out_sz, num_classes)]
        self.features = nn.Sequential(*features)

        # Kaiming-style (fan-out) initialization for all convolutions.
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))

    def _make_layer(self, block, planes, blocks, stride=1):
        downsample = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            # 1x1 conv + BN so the shortcut matches the block output's shape.
            downsample = nn.Sequential(
                nn.Conv2d(self.inplanes, planes * block.expansion,
                          kernel_size=1, stride=stride, bias=False),
                bn(planes * block.expansion),
            )
        layers = []
        layers.append(block(self.inplanes, planes, stride, downsample))
        self.inplanes = planes * block.expansion
        for i in range(1, blocks): layers.append(block(self.inplanes, planes))
        return nn.Sequential(*layers)

    def forward(self, x): return self.features(x)
def load(model, pretrained, name):
    "Optionally load the torchvision pretrained weights for `name` into `model`."
    if pretrained: model.load_state_dict(model_zoo.load_url(model_urls[name]))
    return model
def fa_resnet18(pretrained=False, **kwargs):  return load(ResNet(BasicBlock, [2, 2, 2, 2], **kwargs), pretrained, 'resnet18')
def fa_resnet34(pretrained=False, **kwargs):  return load(ResNet(BasicBlock, [3, 4, 6, 3], **kwargs), pretrained, 'resnet34')
def fa_resnet50(pretrained=False, **kwargs):  return load(ResNet(Bottleneck, [3, 4, 6, 3], **kwargs), pretrained, 'resnet50')
def fa_resnet101(pretrained=False, **kwargs): return load(ResNet(Bottleneck, [3, 4, 23, 3], **kwargs), pretrained, 'resnet101')
def fa_resnet152(pretrained=False, **kwargs): return load(ResNet(Bottleneck, [3, 8, 36, 3], **kwargs), pretrained, 'resnet152')

# Experimental variants (no pretrained weights).
def bnf_resnet50():  return ResNet(BottleneckFinal, [3, 4, 6, 3])
def bnz_resnet50():  return ResNet(BottleneckZero, [3, 4, 6, 3])
def w5_resnet50():   return ResNet(Bottleneck, [2, 3, 3, 2], k=1.5)
def w25_resnet50():  return ResNet(Bottleneck, [3, 4, 4, 3], k=1.25)
def w125_resnet50(): return ResNet(Bottleneck, [3, 4, 6, 3], k=1.125)
def vgg_resnet50():  return ResNet(Bottleneck, [3, 4, 6, 3], vgg_head=True)
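# Usage sketch (illustrative, not part of the original module): build a model and run
# a dummy batch through it. The import path below is an assumption and depends on
# where this package is installed.
#
#   import torch
#   from .resnet import fa_resnet50           # hypothetical import path
#
#   model = fa_resnet50(pretrained=False, num_classes=10)
#   x = torch.randn(2, 3, 224, 224)
#   logits = model(x)                          # tensor of shape (2, 10)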