[Big-Tech AI Algorithm Interview Questions] Deep Learning: MobileNet Series Evolution and Efficiency Analysis (with Code) (Part 2)
In MobileNetV2, ReLU6 bounds the activations of the expansion and depthwise stages, while the final 1x1 projection is kept linear so that the narrow bottleneck does not lose information.

import torch
import torch.nn as nn

class InvertedResidual(nn.Module):
    """MobileNetV2 inverted residual block: 1x1 expand -> 3x3 depthwise -> 1x1 linear project."""
    def __init__(self, in_channels, out_channels, expansion=6, stride=1):
        super(InvertedResidual, self).__init__()
        hidden_dim = in_channels * expansion
        # Shortcut only when resolution and channel count are unchanged
        self.use_residual = stride == 1 and in_channels == out_channels
        self.conv = nn.Sequential(
            # 1x1 pointwise expansion
            nn.Conv2d(in_channels, hidden_dim, kernel_size=1, bias=False),
            nn.BatchNorm2d(hidden_dim),
            nn.ReLU6(inplace=True),
            # 3x3 depthwise convolution (groups=hidden_dim)
            nn.Conv2d(hidden_dim, hidden_dim, kernel_size=3, stride=stride,
                      padding=1, groups=hidden_dim, bias=False),
            nn.BatchNorm2d(hidden_dim),
            nn.ReLU6(inplace=True),
            # 1x1 linear projection (no activation: the linear bottleneck)
            nn.Conv2d(hidden_dim, out_channels, kernel_size=1, bias=False),
            nn.BatchNorm2d(out_channels)
        )

    def forward(self, x):
        if self.use_residual:
            return x + self.conv(x)
        return self.conv(x)
# Quick shape check
x = torch.randn(1, 32, 224, 224)
model = InvertedResidual(32, 64)
y = model(x)
print(y.shape)  # torch.Size([1, 64, 224, 224])
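To make the use_residual condition concrete: with stride=2 (or mismatched channel widths) the shortcut is skipped and the feature map is downsampled. A small extra check, not part of the original snippet:

model_s2 = InvertedResidual(32, 64, stride=2)
print(model_s2(x).shape)  # torch.Size([1, 64, 112, 112]) -- no shortcut here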
MobileNetV3 additionally uses a Squeeze-and-Excitation (SE) module, which pools each channel to a single value, learns per-channel weights from it, and rescales the feature map accordingly.

class SqueezeExcitation(nn.Module):
    def __init__(self, in_channels, reduction=4):
        super(SqueezeExcitation, self).__init__()
        self.se = nn.Sequential(
            nn.AdaptiveAvgPool2d(1),  # squeeze: global average pooling to 1x1
            nn.Conv2d(in_channels, in_channels // reduction, kernel_size=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(in_channels // reduction, in_channels, kernel_size=1),
            nn.Sigmoid()  # excitation: per-channel gate in [0, 1]
        )

    def forward(self, x):
        return x * self.se(x)  # reweight channels
# Quick shape check
x = torch.randn(1, 64, 32, 32)
model = SqueezeExcitation(64)
y = model(x)
print(y.shape)  # torch.Size([1, 64, 32, 32]) -- shape is preserved
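In MobileNetV3 the SE module sits inside the inverted residual block, directly after the depthwise convolution. The sketch below is my own combination of the two classes defined above (the real network also swaps ReLU6 for h-swish and uses a hard-sigmoid SE gate, both omitted here for brevity):

class InvertedResidualSE(nn.Module):
    def __init__(self, in_channels, out_channels, expansion=6, stride=1):
        super(InvertedResidualSE, self).__init__()
        hidden_dim = in_channels * expansion
        self.use_residual = stride == 1 and in_channels == out_channels
        self.expand = nn.Sequential(
            nn.Conv2d(in_channels, hidden_dim, kernel_size=1, bias=False),
            nn.BatchNorm2d(hidden_dim),
            nn.ReLU6(inplace=True),
        )
        self.depthwise = nn.Sequential(
            nn.Conv2d(hidden_dim, hidden_dim, kernel_size=3, stride=stride,
                      padding=1, groups=hidden_dim, bias=False),
            nn.BatchNorm2d(hidden_dim),
            nn.ReLU6(inplace=True),
        )
        self.se = SqueezeExcitation(hidden_dim)  # channel reweighting on the expanded features
        self.project = nn.Sequential(
            nn.Conv2d(hidden_dim, out_channels, kernel_size=1, bias=False),
            nn.BatchNorm2d(out_channels),
        )

    def forward(self, x):
        out = self.project(self.se(self.depthwise(self.expand(x))))
        return x + out if self.use_residual else out

# Quick shape check
x = torch.randn(1, 32, 56, 56)
print(InvertedResidualSE(32, 32)(x).shape)  # torch.Size([1, 32, 56, 56])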