MobileNetV3 Code

I recently reimplemented MobileNetV3 in PyTorch; the full code is below.

import math

import torch
import torch.nn as nn
import torch.nn.functional as F


#----------------------------------------------------------------------
def _make_divisible(channel, divisor, min_value=None):
    """
    Round the channel count to the nearest multiple of `divisor` (8 here).
    """
    if min_value is None:
        min_value = divisor
    new_channel = max(min_value, int(channel + divisor / 2) // divisor * divisor)

    # Make sure the rounding does not shrink the channel count by more than 10%.
    if new_channel < channel * 0.9:
        new_channel += divisor

    return new_channel
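
A quick check of the rounding behaviour (my addition, not part of the original listing):

    _make_divisible(54, 8)   # -> 56, the nearest multiple of 8
    _make_divisible(10, 8)   # -> 16, since rounding down to 8 would be a >10% drop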

########################################################################
class relu(nn.Module):
    """Thin nn.ReLU wrapper so all activations here share the same interface."""

    #----------------------------------------------------------------------
    def __init__(self, inplace=True):
        """Constructor"""
        super(relu, self).__init__()
        self.relu = nn.ReLU(inplace=inplace)

    #----------------------------------------------------------------------
    def forward(self, x):
        return self.relu(x)
        
########################################################################
class h_sigmoid(nn.Module):
    """Hard sigmoid: ReLU6(x + 3) / 6, a cheap piecewise-linear approximation of sigmoid."""

    #----------------------------------------------------------------------
    def __init__(self, inplace=True):
        """Constructor"""
        super(h_sigmoid, self).__init__()
        self.relu_6 = nn.ReLU6(inplace=inplace)

    #----------------------------------------------------------------------
    def forward(self, x):
        return self.relu_6(x + 3) / 6
        
########################################################################
class h_swish(nn.Module):
    """Hard swish: x * h_sigmoid(x), used in place of swish for efficiency."""

    #----------------------------------------------------------------------
    def __init__(self, inplace=True):
        """Constructor"""
        super(h_swish, self).__init__()
        self.h_sigmoid = h_sigmoid(inplace=inplace)

    #----------------------------------------------------------------------
    def forward(self, x):
        return x * self.h_sigmoid(x)
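
Both activations should agree with PyTorch's built-ins (a sanity check I added; F.hardsigmoid and F.hardswish exist from PyTorch 1.6 onward):

    x = torch.randn(4, 8)
    assert torch.allclose(h_sigmoid()(x), F.hardsigmoid(x))
    assert torch.allclose(h_swish()(x), F.hardswish(x))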
        
        
########################################################################
class senet(nn.Module):
    """Squeeze-and-Excitation: global-pool to a channel descriptor, then gate each channel."""

    #----------------------------------------------------------------------
    def __init__(self, channel, reduction=4):
        """Constructor"""
        super(senet, self).__init__()
        self.avg_pool = nn.AdaptiveAvgPool2d(1)
        self.fc = nn.Sequential(
            nn.Linear(channel, _make_divisible(channel // reduction, 8)),
            nn.ReLU(inplace=True),
            nn.Linear(_make_divisible(channel // reduction, 8), channel),
            h_sigmoid()
        )

    def forward(self, x):
        b, c, h, w = x.size()
        y = self.avg_pool(x).view(b, c)
        y = self.fc(y).view(b, c, 1, 1)

        return x * y
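
The SE block only rescales channels, so shapes pass through unchanged (my check):

    feat = torch.randn(2, 16, 32, 32)
    assert senet(16)(feat).shape == (2, 16, 32, 32)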
               
    
#----------------------------------------------------------------------
def conv_3_x_3_bn(in_place, out_place, stride):
    """3x3 conv + BatchNorm + h-swish (used as the network stem)."""
    return nn.Sequential(
        nn.Conv2d(in_place, out_place, 3, stride, 1, bias=False),
        nn.BatchNorm2d(out_place),
        h_swish()
    )

#----------------------------------------------------------------------
def conv_1_x_1_bn(in_place, out_place):
    """1x1 conv + BatchNorm + h-swish (the head conv before pooling)."""
    return nn.Sequential(
        nn.Conv2d(in_place, out_place, 1, 1, 0, bias=False),
        nn.BatchNorm2d(out_place),
        h_swish()
    )
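
Both helpers are conv + BN + h-swish stacks; only the kernel and stride differ. The stride-2 stem halves the spatial resolution (my check):

    assert conv_3_x_3_bn(3, 16, 2)(torch.randn(1, 3, 224, 224)).shape == (1, 16, 112, 112)
    assert conv_1_x_1_bn(16, 32)(torch.randn(1, 16, 112, 112)).shape == (1, 32, 112, 112)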

########################################################################
class residual(nn.Module):
    """Inverted residual block: (optional 1x1 expand) -> depthwise -> SE -> 1x1 project."""

    #----------------------------------------------------------------------
    def __init__(self, in_place, scale_place, out_place, k_size, stride, use_se, use_hs):
        """Constructor"""
        super(residual, self).__init__()

        assert stride in [1, 2]

        # The skip connection is only valid when the block preserves the shape.
        self.identity = in_place == out_place and stride == 1

        # No expansion: depthwise conv -> SE -> pointwise-linear projection.
        if in_place == scale_place:
            self.conv = nn.Sequential(
                nn.Conv2d(scale_place, scale_place, k_size, stride, padding=(k_size - 1) // 2, groups=scale_place, bias=False),
                nn.BatchNorm2d(scale_place),
                h_swish() if use_hs else nn.ReLU(inplace=True),
                senet(scale_place) if use_se else nn.Identity(),
                nn.Conv2d(scale_place, out_place, 1, 1, 0, bias=False),
                nn.BatchNorm2d(out_place)
            )
        # With expansion: 1x1 expand -> depthwise -> SE -> pointwise-linear projection.
        else:
            self.conv = nn.Sequential(
                nn.Conv2d(in_place, scale_place, 1, 1, 0, bias=False),
                nn.BatchNorm2d(scale_place),
                h_swish() if use_hs else nn.ReLU(inplace=True),
                nn.Conv2d(scale_place, scale_place, k_size, stride, padding=(k_size - 1) // 2, groups=scale_place, bias=False),
                nn.BatchNorm2d(scale_place),
                senet(scale_place) if use_se else nn.Identity(), 
                h_swish() if use_hs else nn.ReLU(inplace=True),
                nn.Conv2d(scale_place, out_place, 1, 1, 0, bias=False), 
                nn.BatchNorm2d(out_place)
                
            )
    
    def forward(self, x):
        if self.identity:
            return x + self.conv(x)
        else:
            return self.conv(x)
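
Both branches can be shape-checked directly (my addition; the channel numbers are arbitrary):

    # in_place != scale_place and stride 2: expand branch, no skip connection
    blk = residual(16, 64, 24, 3, 2, use_se=False, use_hs=False)
    assert blk(torch.randn(1, 16, 56, 56)).shape == (1, 24, 28, 28)

    # matching shapes and stride 1: the skip connection is active
    blk = residual(24, 88, 24, 3, 1, use_se=True, use_hs=True)
    assert blk.identity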
    
    

########################################################################
class MobileNetV3(nn.Module):
    """MobileNetV3 backbone: returns logits, or FPN feature maps when num_class is None."""

    #----------------------------------------------------------------------
    def __init__(self, net_cfg, mode='large', num_class=None, fpn_ids=None, width_mult=1.0):
        """Constructor"""
        super(MobileNetV3, self).__init__()
        assert mode in ['large', 'small']
        self.net_cfg = net_cfg
        self.fpn_ids = fpn_ids

        # Stem: 3x3 conv with stride 2.
        in_channel = _make_divisible(16 * width_mult, 8)
        layers = [conv_3_x_3_bn(3, in_channel, 2)]
        block = residual

        # Stack the inverted-residual blocks described by net_cfg.
        for k, scale, channel, stride, use_se, use_hs in self.net_cfg:
            out_place = _make_divisible(channel * width_mult, 8)
            scale_place = _make_divisible(in_channel * scale, 8)
            layers.append(block(in_channel, scale_place, out_place, k, stride, use_se, use_hs))
            in_channel = out_place

        self.features = nn.Sequential(*layers)
        self.last_conv = conv_1_x_1_bn(in_channel, scale_place)
        self.avg_pool = nn.AdaptiveAvgPool2d((1, 1))

        output_channel = {'large': 1280, 'small': 1024}
        output_channel = _make_divisible(
            output_channel[mode] * width_mult, 8) if width_mult > 1.0 else output_channel[mode]

        self.classifi = num_class is not None
        if self.classifi:
            self.classifier = nn.Sequential(
                nn.Linear(scale_place, output_channel),
                h_swish(),
                nn.Dropout(0.2),
                nn.Linear(output_channel, num_class)
            )
        self._init_weights()
        
    #----------------------------------------------------------------------
    def forward(self, x):
        """Collect FPN feature maps along the way; classify only if num_class was given."""
        fpn_layers = []
        for i, l in enumerate(self.features):
            x = l(x)
            if self.fpn_ids and i in self.fpn_ids:
                fpn_layers.append(x)

        if self.classifi:
            x = self.last_conv(x)
            x = self.avg_pool(x)
            x = x.view(x.size(0), -1)
            x = self.classifier(x)

            return x
        else:
            # The final feature map goes first, followed by the requested FPN levels.
            fpn_layers.insert(0, x)
            return fpn_layers
        
    
    #----------------------------------------------------------------------
    def _init_weights(self):
        """He-style init for convs, constants for BatchNorm, small normal for linears."""

        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
                m.weight.data.normal_(0, math.sqrt(2. / n))
                if m.bias is not None:
                    m.bias.data.zero_()
            elif isinstance(m, nn.BatchNorm2d):
                m.weight.data.fill_(1)
                m.bias.data.zero_()
            elif isinstance(m, nn.Linear):
                m.weight.data.normal_(0, 0.01)
                m.bias.data.zero_()

def mobilenetv3_large(**kwargs):
    """
    Constructs a MobileNetV3-Large model
    """
    cfgs = [
        # k, scale, c, stride, SE, HS
        [3,   1,  16, 1, 0, 0],
        [3,   4,  24, 2, 0, 0],
        [3,   3,  24, 1, 0, 0],
        [5,   3,  40, 2, 1, 0],
        [5,   3,  40, 1, 1, 0],
        [5,   3,  40, 1, 1, 0], # P3 5
        [3,   6,  80, 2, 0, 1],
        [3, 2.5,  80, 1, 0, 1],
        [3, 2.3,  80, 1, 0, 1],
        [3, 2.3,  80, 1, 0, 1], # P4 9
        [3,   6, 112, 1, 1, 1],
        [3,   6, 112, 1, 1, 1],
        [5,   6, 160, 2, 1, 1],
        [5,   6, 160, 1, 1, 1],
        [5,   6, 160, 1, 1, 1] # P5 14
    ]
    return MobileNetV3(cfgs, mode='large', **kwargs)
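
With num_class=None the network returns feature maps instead of logits, which is what the P3/P4/P5 comments in the config are for. My reading (an assumption, not stated in the original): config row j sits at features[j + 1] because the stem occupies index 0, so the P3/P4 rows above are features[6] and features[10]:

    backbone = mobilenetv3_large(num_class=None, fpn_ids=[6, 10])
    p5, p3, p4 = backbone(torch.randn(1, 3, 224, 224))
    # p3: (1, 40, 28, 28), p4: (1, 80, 14, 14), p5: (1, 160, 7, 7)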


def mobilenetv3_small(**kwargs):
    """
    Constructs a MobileNetV3-Small model
    """
    cfgs = [
        # k, scale, c, stride, SE, HS
        [3,    1,  16, 2, 1, 0],
        [3,  4.5,  24, 2, 0, 0],
        [3, 3.67,  24, 1, 0, 0],
        [5,    4,  40, 2, 1, 1],
        [5,    6,  40, 1, 1, 1],
        [5,    6,  40, 1, 1, 1],
        [5,    3,  48, 1, 1, 1],
        [5,    3,  48, 1, 1, 1],
        [5,    6,  96, 2, 1, 1],
        [5,    6,  96, 1, 1, 1],
        [5,    6,  96, 1, 1, 1],
    ]
      
    return MobileNetV3(cfgs, mode='small', **kwargs)          
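
A minimal classification usage sketch (my addition; 1000 classes assumed):

    model = mobilenetv3_small(num_class=1000)
    logits = model(torch.randn(2, 3, 224, 224))
    print(logits.shape)  # torch.Size([2, 1000])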
        