0
Follow
0
View

# Please reply as soon as possible — preferably via QQ. Sorry to bother you.

dzfu123 注册会员
2023-02-27 22:31

Based on GPT's suggestion and my own ideas, I modified the code as follows — please check whether my changes are correct:

```python
import torch
import torch.nn as nn

def get_dwconv(c, k=3, s=True):
    """Build a depthwise-separable conv block.

    Structure: k x k depthwise conv -> BatchNorm -> 1x1 pointwise conv
    -> BatchNorm -> ReLU. Padding k//2 keeps the spatial size at stride 1.

    Args:
        c: number of input (and output) channels.
        k: depthwise kernel size.
        s: if True, the depthwise conv uses stride 2 (downsamples H and W
           by 2); if False, stride 1 (spatial size preserved).

    Returns:
        nn.Sequential implementing the block.
    """
    # The two original branches were identical except for the stride,
    # so fold them into a single construction.
    stride = 2 if s else 1
    return nn.Sequential(
        nn.Conv2d(c, c, k, stride=stride, padding=k // 2, groups=c, bias=False),
        nn.BatchNorm2d(c),
        nn.Conv2d(c, c, 1, stride=1, padding=0, bias=False),
        nn.BatchNorm2d(c),
        nn.ReLU(inplace=True),
    )

class gnconv(nn.Module):
    """Recursive gated convolution (gnConv), after HorNet.

    Splits the projected features into a gate and a depthwise branch,
    then applies `order` rounds of element-wise gating, lifting the
    channel count between rounds with 1x1 convolutions.

    Args:
        dim: number of input/output channels.
        order: recursion depth of the gating.
        gflayer: optional factory for a global-filter layer to use in
            place of the depthwise conv (called as gflayer(ch, h=h, w=w)).
        h, w: spatial parameters forwarded to `gflayer`.
        s: scaling factor applied to the depthwise branch.
    """

    def __init__(self, dim, order=5, gflayer=None, h=14, w=8, s=1.0):
        super(gnconv, self).__init__()
        self.order = order
        # Channel sizes dim/2^(order-1), ..., dim/2, dim (smallest first).
        self.dims = [dim // 2 ** i for i in range(order)]
        self.dims.reverse()
        self.proj_in = nn.Conv2d(dim, 2 * dim, 1)
        if gflayer is None:
            # BUGFIX: the original passed s=True -> stride 2, which halves
            # the spatial size of the depthwise branch and breaks the
            # element-wise multiply with `pwa` in forward(). The depthwise
            # conv here must preserve spatial size, so use stride 1.
            self.dwconv = get_dwconv(sum(self.dims), 7, False)
        else:
            self.dwconv = gflayer(sum(self.dims), h=h, w=w)
        self.proj_out = nn.Conv2d(dim, dim, 1)
        # 1x1 convs lifting dims[i] -> dims[i+1] between gating rounds.
        self.pws = nn.ModuleList(
            [nn.Conv2d(self.dims[i], self.dims[i + 1], 1) for i in range(order - 1)]
        )
        self.scale = s

    def forward(self, x, mask=None, dummy=False):
        # BUGFIX: this code was dangling at class-body level (NameError on
        # `x`) and returned the raw split; wrap it in forward() and complete
        # the recursive gating. `mask`/`dummy` are accepted for caller
        # compatibility but unused.
        fused_x = self.proj_in(x)
        # First gate (dims[0] channels) and concatenated depthwise branch.
        pwa, abc = torch.split(fused_x, (self.dims[0], sum(self.dims)), dim=1)
        dw_abc = self.dwconv(abc) * self.scale
        dw_list = torch.split(dw_abc, self.dims, dim=1)
        # Recursive gating: multiply, then lift channels, order-1 times.
        x = pwa * dw_list[0]
        for i in range(self.order - 1):
            x = self.pws[i](x) * dw_list[i + 1]
        x = self.proj_out(x)
        return x
```
cyan1986 注册会员
2023-02-27 22:31
Your class is missing the `forward` method — those last lines should be wrapped like this:

```python
def forward(self, x, mask=None, dummy=False):
    fused_x = self.proj_in(x)
    pwa, abc = torch.split(fused_x, (self.dims[0], sum(self.dims)), dim=1)
    abc = self.dwconv(abc)
    abc = torch.cat(torch.split(abc, self.dims[1:], dim=1), dim=1)
    abc = self.proj_out(abc)
    x = pwa + self.scale * abc
    return x
```
<!-- -->
dsunzhaogong 注册会员
2023-02-27 22:31

Please copy the code out, do not send pictures

applemylover 注册会员

Publish Time
2023-02-27 22:31
Update Time
2023-02-27 22:31