# models_inPaper.py
"""
Slim-neck by GSConv: A better design paradigm of detector architectures for autonomous vehicles
paper: https://arxiv.org/ftp/arxiv/papers/2206/2206.02424.pdf
"""
import torch
import torch.nn as nn
import math
# GSConvE test
class GSConvE(nn.Module):
    '''
    GSConv enhancement for representation learning: generates multiple receptive fields and
    texture features within a single Conv module.
    https://github.com/AlanLi1997/slim-neck-by-gsconv
    '''
    def __init__(self, c1, c2, k=1, s=1, g=1, act=True):
        super().__init__()
        c_ = c2 // 4
        self.cv1 = Conv(c1, c_, k, s, None, g, act)
        self.cv2 = Conv(c_, c_, 9, 1, None, c_, act)   # depth-wise, 9x9 kernel
        self.cv3 = Conv(c_, c_, 13, 1, None, c_, act)  # depth-wise, 13x13 kernel
        self.cv4 = Conv(c_, c_, 17, 1, None, c_, act)  # depth-wise, 17x17 kernel

    def forward(self, x):
        x1 = self.cv1(x)
        x2 = self.cv2(x1)
        x3 = self.cv3(x1)
        x4 = self.cv4(x1)
        y = torch.cat((x1, x2, x3, x4), dim=1)
        # channel shuffle
        y = y.reshape(y.shape[0], 2, y.shape[1] // 2, y.shape[2], y.shape[3])
        y = y.permute(0, 2, 1, 3, 4)
        return y.reshape(y.shape[0], -1, y.shape[3], y.shape[4])
def autopad(k, p=None):  # kernel, padding
    # Pad to 'same'
    if p is None:
        p = k // 2 if isinstance(k, int) else [x // 2 for x in k]  # auto-pad
    return p
class Conv(nn.Module):
    # Standard convolution: Conv2d + BatchNorm2d + Mish (C_B_M)
    def __init__(self, c1, c2, k=1, s=1, p=None, g=1, act=True):
        super().__init__()
        self.conv = nn.Conv2d(c1, c2, k, s, autopad(k, p), groups=g, bias=False)
        self.bn = nn.BatchNorm2d(c2)
        self.act = nn.Mish() if act is True else (act if isinstance(act, nn.Module) else nn.Identity())

    def forward(self, x):
        return self.act(self.bn(self.conv(x)))

    def forward_fuse(self, x):
        # used once the BatchNorm has been fused into the convolution
        return self.act(self.conv(x))
class GSConv(nn.Module):
    # GSConv https://github.com/AlanLi1997/slim-neck-by-gsconv
    def __init__(self, c1, c2, k=1, s=1, g=1, act=True):
        super().__init__()
        c_ = c2 // 2
        self.cv1 = Conv(c1, c_, k, s, None, g, act)  # standard convolution
        self.cv2 = Conv(c_, c_, 5, 1, 2, c_, act)    # depth-wise convolution

    def forward(self, x):
        x1 = self.cv1(x)
        x2 = torch.cat((x1, self.cv2(x1)), 1)
        # channel shuffle
        y = x2.reshape(x2.shape[0], 2, x2.shape[1] // 2, x2.shape[2], x2.shape[3])
        y = y.permute(0, 2, 1, 3, 4)
        return y.reshape(y.shape[0], -1, y.shape[3], y.shape[4])
class GSConvns(GSConv):
    # GSConv with a normative-shuffle https://github.com/AlanLi1997/slim-neck-by-gsconv
    def __init__(self, c1, c2, k=1, s=1, g=1, act=True):
        super().__init__(c1, c2, k, s, g, act)
        c_ = c2 // 2
        self.shuf = nn.Conv2d(c_ * 2, c2, 1, 1, 0, bias=False)

    def forward(self, x):
        x1 = self.cv1(x)
        x2 = torch.cat((x1, self.cv2(x1)), 1)
        # normative-shuffle (learned 1x1 mixing), TensorRT supported
        return torch.relu(self.shuf(x2))
class GSBottleneck(nn.Module):
    # GS Bottleneck https://github.com/AlanLi1997/slim-neck-by-gsconv
    def __init__(self, c1, c2, k=3, s=1):
        super().__init__()
        c_ = c2 // 2
        # lightweight branch
        self.conv_lighting = nn.Sequential(
            GSConv(c1, c_, 1, 1),
            GSConv(c_, c2, 3, 1, act=False))
        self.shortcut = Conv(c1, c2, 1, 1, act=False)

    def forward(self, x):
        return self.conv_lighting(x) + self.shortcut(x)
class DWConv(Conv):
    # Depth-wise convolution class
    def __init__(self, c1, c2, k=1, s=1, act=True):  # ch_in, ch_out, kernel, stride, activation
        super().__init__(c1, c2, k, s, g=math.gcd(c1, c2), act=act)
class GSBottleneckC(GSBottleneck):
    # cheap GS Bottleneck https://github.com/AlanLi1997/slim-neck-by-gsconv
    def __init__(self, c1, c2, k=3, s=1):
        super().__init__(c1, c2, k, s)
        self.shortcut = DWConv(c1, c2, 3, 1, act=False)
class VoVGSCSP(nn.Module):
    # VoVGSCSP module with GSBottleneck
    def __init__(self, c1, c2, n=1, shortcut=True, g=1, e=0.5):
        super().__init__()
        c_ = int(c2 * e)  # hidden channels
        self.cv1 = Conv(c1, c_, 1, 1)
        self.cv2 = Conv(c1, c_, 1, 1)
        # self.gc1 = GSConv(c_, c_, 1, 1)
        # self.gc2 = GSConv(c_, c_, 1, 1)
        self.gsb = GSBottleneck(c_, c_, 1, 1)
        self.res = Conv(c_, c_, 3, 1, act=False)
        self.cv3 = Conv(2 * c_, c2, 1)

    def forward(self, x):
        x1 = self.gsb(self.cv1(x))
        y = self.cv2(x)
        return self.cv3(torch.cat((y, x1), dim=1))
class VoVGSCSPC(VoVGSCSP):
    # cheap VoVGSCSP module with GSBottleneckC
    def __init__(self, c1, c2, n=1, shortcut=True, g=1, e=0.5):
        super().__init__(c1, c2, n, shortcut, g, e)
        c_ = int(c2 * e)  # hidden channels
        self.gsb = GSBottleneckC(c_, c_, 3, 1)
'''
class ChannelShuffle(nn.Module):
    # ChannelShuffle from ShuffleNetV2 https://github.com/miaow1988/ShuffleNet_V2_pytorch_caffe/blob/master
    def __init__(self, groups):
        super(ChannelShuffle, self).__init__()
        self.groups = groups

    def forward(self, x):
        x = x.reshape(x.shape[0], self.groups, x.shape[1] // self.groups, x.shape[2], x.shape[3])
        x = x.permute(0, 2, 1, 3, 4)
        x = x.reshape(x.shape[0], -1, x.shape[3], x.shape[4])
        return x

    def generate_caffe_prototxt(self, caffe_net, layer):
        layer = L.ShuffleChannel(layer, group=self.groups)
        caffe_net[self.g_name] = layer
        return layer
'''
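

# ---------------------------------------------------------------------------
# Minimal usage sketch (not part of the original file): a hedged sanity check
# that assumes an arbitrary 64-channel, 40x40 input feature map and only
# verifies that each module runs and produces the expected output channels.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    x = torch.randn(1, 64, 40, 40)  # (batch, channels, height, width)

    modules = (GSConv(64, 128, 1, 1),
               GSConvns(64, 128, 1, 1),
               GSConvE(64, 128, 1, 1),
               GSBottleneck(64, 128),
               GSBottleneckC(64, 128),
               VoVGSCSP(64, 128),
               VoVGSCSPC(64, 128))

    for m in modules:
        y = m(x)
        assert y.shape == (1, 128, 40, 40), (type(m).__name__, tuple(y.shape))
        print(f'{type(m).__name__:>13}: {tuple(x.shape)} -> {tuple(y.shape)}')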