1. Create a new file named SE.py under yolov5/models and put the code below into it.
The code is as follows:
import numpy as np
import torch
from torch import nn
from torch.nn import init
from torch.nn.parameter import Parameter


class ShuffleAttention(nn.Module):
    def __init__(self, channel=512, reduction=16, G=8):
        super().__init__()
        self.G = G
        self.channel = channel
        self.avg_pool = nn.AdaptiveAvgPool2d(1)
        self.gn = nn.GroupNorm(channel // (2 * G), channel // (2 * G))
        self.cweight = Parameter(torch.zeros(1, channel // (2 * G), 1, 1))
        self.cbias = Parameter(torch.ones(1, channel // (2 * G), 1, 1))
        self.sweight = Parameter(torch.zeros(1, channel // (2 * G), 1, 1))
        self.sbias = Parameter(torch.ones(1, channel // (2 * G), 1, 1))
        self.sigmoid = nn.Sigmoid()

    def init_weights(self):
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                init.kaiming_normal_(m.weight, mode='fan_out')
                if m.bias is not None:
                    init.constant_(m.bias, 0)
            elif isinstance(m, nn.BatchNorm2d):
                init.constant_(m.weight, 1)
                init.constant_(m.bias, 0)
            elif isinstance(m, nn.Linear):
                init.normal_(m.weight, std=0.001)
                if m.bias is not None:
                    init.constant_(m.bias, 0)

    @staticmethod
    def channel_shuffle(x, groups):
        b, c, h, w = x.shape
        x = x.reshape(b, groups, -1, h, w)
        x = x.permute(0, 2, 1, 3, 4)
        # flatten
        x = x.reshape(b, -1, h, w)
        return x

    def forward(self, x):
        b, c, h, w = x.size()
        # group into subfeatures
        x = x.view(b * self.G, -1, h, w)  # bs*G, c//G, h, w
        # channel split
        x_0, x_1 = x.chunk(2, dim=1)  # bs*G, c//(2*G), h, w
        # channel attention
        x_channel = self.avg_pool(x_0)  # bs*G, c//(2*G), 1, 1
        x_channel = self.cweight * x_channel + self.cbias  # bs*G, c//(2*G), 1, 1
        x_channel = x_0 * self.sigmoid(x_channel)
        # spatial attention
        x_spatial = self.gn(x_1)  # bs*G, c//(2*G), h, w
        x_spatial = self.sweight * x_spatial + self.sbias  # bs*G, c//(2*G), h, w
        x_spatial = x_1 * self.sigmoid(x_spatial)  # bs*G, c//(2*G), h, w
        # concatenate along channel axis
        out = torch.cat([x_channel, x_spatial], dim=1)  # bs*G, c//G, h, w
        out = out.contiguous().view(b, -1, h, w)
        # channel shuffle
        out = self.channel_shuffle(out, 2)
        return out
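As a quick sanity check you can run the module on its own by appending a small test to the bottom of SE.py. This is only a minimal sketch; the 1x512x20x20 input shape is an example I picked, not something from this post:

if __name__ == '__main__':
    # Feed a dummy feature map through ShuffleAttention; the output shape must match the input.
    x = torch.randn(1, 512, 20, 20)          # batch=1, channels=512, 20x20 feature map
    sa = ShuffleAttention(channel=512, G=8)  # channel must be divisible by 2*G
    y = sa(x)
    print(y.shape)                           # expected: torch.Size([1, 512, 20, 20])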
2. Open yolo.py and edit it.
Around line 28 (next to the other model imports; the exact line number depends on your YOLOv5 version), add from models.SE import ShuffleAttention, then save.
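For reference, the import section of models/yolo.py ends up looking roughly like this; the surrounding imports and their exact positions vary between YOLOv5 releases, so treat this as a sketch:

# models/yolo.py -- import section (line numbers differ between releases)
from models.common import *             # existing YOLOv5 imports
from models.experimental import *
from models.SE import ShuffleAttention  # new: makes the attention module visible to parse_model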
3. Find the yaml file you want to modify. I chose yolov5s.yaml (pick whichever model config suits your needs), added the ShuffleAttention module we just wrote to yolov5s.yaml, and changed a few related entries. The change is sketched below.
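The exact yaml edit is not reproduced in the text, so here is a sketch of one common placement, inserting the module right before SPPF at the end of the backbone; the position, the 1024 argument, and the index comments are my assumptions, not necessarily the edit made in this post:

# yolov5s.yaml -- tail of the backbone (sketch)
backbone:
  # [from, number, module, args]
  [ ...                                  # earlier Conv/C3 layers unchanged
   [-1, 3, C3, [1024]],                  # 8
   [-1, 1, ShuffleAttention, [1024]],    # 9  <- new attention layer
   [-1, 1, SPPF, [1024, 5]],             # 10 (was 9)
  ]

Inserting a layer shifts the index of every later layer by one, so any absolute layer indices that come after the insertion point (for example the layer list in the final Detect entry, or a Concat that points past the new layer) have to be bumped to match; that is the "change a few entries" part.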
4. Add two lines of code in yolo.py (around lines 335-337; see the sketch below), then save!
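The two lines themselves are not shown here, so the following is my assumption of how such a module is usually registered: an extra branch inside parse_model() in models/yolo.py that forwards the input channel count to the module. Treat the placement and variable handling as a sketch:

# models/yolo.py, inside parse_model() (exact line numbers vary between releases)
elif m is ShuffleAttention:
    c2 = ch[f]              # attention keeps the channel count of its input layer
    args = [c2, *args[1:]]  # pass the channel count as the module's first argument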
Run it, and ShuffleAttention shows up in the printed model structure.
That's it for the setup. Now training for 100 epochs; no idea how long it will take!
In the end, the results still went down.