
1. Backbone network, version 1: every stage written out by hand
import torch
from torch import nn
import torch.nn.functional as F


class ConvBnLeakRelu(nn.Module):
    """Basic Darknet block: conv -> BatchNorm -> LeakyReLU."""
    def __init__(self, in_ch, out_ch, k=3, s=1):
        super().__init__()
        self.layer = nn.Sequential(
            nn.Conv2d(in_ch, out_ch, kernel_size=k, padding=k // 2, stride=s),
            nn.BatchNorm2d(out_ch),
            nn.LeakyReLU(),
        )

    def forward(self, x):
        return self.layer(x)


class Resnet(nn.Module):
    """Residual block: 1x1 bottleneck down to ch//2, 3x3 back to ch, plus skip."""
    def __init__(self, ch):
        super().__init__()
        self.resblock = nn.Sequential(
            ConvBnLeakRelu(ch, ch // 2, k=1),
            ConvBnLeakRelu(ch // 2, ch, k=3),
        )

    def forward(self, x):
        return F.relu(self.resblock(x) + x)


class Downsample(nn.Module):
    """Stride-2 3x3 conv that halves the spatial resolution."""
    def __init__(self, in_ch, out_ch):
        super().__init__()
        self.layer = ConvBnLeakRelu(in_ch, out_ch, k=3, s=2)

    def forward(self, x):
        return self.layer(x)


class Net(nn.Module):
    def __init__(self):
        super().__init__()
        # 3x3 stem conv; padding keeps the 416x416 input resolution.
        self.input_layer = nn.Conv2d(3, 32, 3, 1, padding=1)
        self.layer1 = nn.Sequential(
            Downsample(32, 64),
            Resnet(64),
        )
        self.layer2 = nn.Sequential(
            Downsample(64, 128),
            Resnet(128),
            Resnet(128),
        )
        self.layer3 = nn.Sequential(
            Downsample(128, 256),
            Resnet(256),
            Resnet(256),
            Resnet(256),
            Resnet(256),
            Resnet(256),
            Resnet(256),
            Resnet(256),
            Resnet(256),
        )
        self.layer4 = nn.Sequential(
            Downsample(256, 512),
            Resnet(512),
            Resnet(512),
            Resnet(512),
            Resnet(512),
            Resnet(512),
            Resnet(512),
            Resnet(512),
            Resnet(512),
        )
        self.layer5 = nn.Sequential(
            Downsample(512, 1024),
            Resnet(1024),
            Resnet(1024),
            Resnet(1024),
            Resnet(1024),
        )

    def forward(self, x):
        x = self.input_layer(x)
        x = self.layer1(x)
        x = self.layer2(x)
        x_52 = self.layer3(x)     # 52x52 feature map, stride 8
        x_26 = self.layer4(x_52)  # 26x26 feature map, stride 16
        x_13 = self.layer5(x_26)  # 13x13 feature map, stride 32
        return x_52, x_26, x_13


if __name__ == '__main__':
    x = torch.randn(1, 3, 416, 416)
    net = Net()
    out = net(x)
    for i in out:
        print(i.shape)
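
If the forward pass is wired correctly, the script prints the three feature maps at strides 8, 16 and 32. As a quick sanity check, the shapes can also be asserted explicitly; this small helper is my own sketch, not part of the original script:

def check_shapes():
    # Expected shapes for a 416x416 input (torch.Size compares equal to tuples).
    net = Net()
    x_52, x_26, x_13 = net(torch.randn(1, 3, 416, 416))
    assert x_52.shape == (1, 256, 52, 52)    # stride 8
    assert x_26.shape == (1, 512, 26, 26)    # stride 16
    assert x_13.shape == (1, 1024, 13, 13)   # stride 32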

2. Backbone network, version 2: repeated stages built with a make_layer helper
import torch
from torch import nn
import torch.nn.functional as F


class ConvBnLeakRelu(nn.Module):
    """Basic Darknet block: conv -> BatchNorm -> LeakyReLU."""
    def __init__(self, in_ch, out_ch, k=3, s=1):
        super().__init__()
        self.layer = nn.Sequential(
            nn.Conv2d(in_ch, out_ch, kernel_size=k, padding=k // 2, stride=s),
            nn.BatchNorm2d(out_ch),
            nn.LeakyReLU(),
        )

    def forward(self, x):
        return self.layer(x)


class Resnet(nn.Module):
    """Residual block: 1x1 bottleneck, 3x3 expansion, skip connection."""
    def __init__(self, ch):
        super().__init__()
        self.resblock = nn.Sequential(
            ConvBnLeakRelu(ch, ch // 2, k=1),
            ConvBnLeakRelu(ch // 2, ch, k=3),
        )

    def forward(self, x):
        return F.relu(self.resblock(x) + x)


class Downsample(nn.Module):
    """Stride-2 3x3 conv that halves the resolution."""
    def __init__(self, in_ch, out_ch):
        super().__init__()
        self.layer = ConvBnLeakRelu(in_ch, out_ch, k=3, s=2)

    def forward(self, x):
        return self.layer(x)


class Net(nn.Module):
    def __init__(self):
        super().__init__()
        self.input_layer = nn.Conv2d(3, 32, 3, 1, padding=1)
        # Each stage: one Downsample followed by block_num residual blocks.
        self.layer1 = self.make_layer(32, 64, 1)
        self.layer2 = self.make_layer(64, 128, 2)
        self.layer3 = self.make_layer(128, 256, 8)
        self.layer4 = self.make_layer(256, 512, 8)
        self.layer5 = self.make_layer(512, 1024, 4)

    def make_layer(self, in_ch, out_ch, block_num):
        layers = [Downsample(in_ch, out_ch)]
        for _ in range(block_num):
            layers += [Resnet(out_ch)]
        return nn.Sequential(*layers)

    def forward(self, x):
        x = self.input_layer(x)
        x = self.layer1(x)
        x = self.layer2(x)
        x_52 = self.layer3(x)
        x_26 = self.layer4(x_52)
        x_13 = self.layer5(x_26)
        return x_52, x_26, x_13


if __name__ == '__main__':
    x = torch.randn(1, 3, 416, 416)
    net = Net()
    out = net(x)
    for i in out:
        print(i.shape)
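
make_layer only changes how the modules are constructed, so versions 1 and 2 should be identical in size. One way to convince yourself (a small helper of my own, not in the original) is to compare total parameter counts between the two versions:

def count_params(m: nn.Module) -> int:
    # Sum over all registered parameters; versions 1 and 2 should agree.
    return sum(p.numel() for p in m.parameters())

print(count_params(Net()))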


3. Version 3: put all the stages into one nn.Sequential and slice it to take the outputs

import torch
from torch import nn
import torch.nn.functional as F


class ConvBnLeakRelu(nn.Module):
    """Basic Darknet block: conv -> BatchNorm -> LeakyReLU."""
    def __init__(self, in_ch, out_ch, k=3, s=1):
        super().__init__()
        self.layer = nn.Sequential(
            nn.Conv2d(in_ch, out_ch, kernel_size=k, padding=k // 2, stride=s),
            nn.BatchNorm2d(out_ch),
            nn.LeakyReLU(),
        )

    def forward(self, x):
        return self.layer(x)


class Resnet(nn.Module):
    """Residual block: 1x1 bottleneck, 3x3 expansion, skip connection."""
    def __init__(self, ch):
        super().__init__()
        self.resblock = nn.Sequential(
            ConvBnLeakRelu(ch, ch // 2, k=1),
            ConvBnLeakRelu(ch // 2, ch, k=3),
        )

    def forward(self, x):
        return F.relu(self.resblock(x) + x)


class Downsample(nn.Module):
    """Stride-2 3x3 conv that halves the resolution."""
    def __init__(self, in_ch, out_ch):
        super().__init__()
        self.layer = ConvBnLeakRelu(in_ch, out_ch, k=3, s=2)

    def forward(self, x):
        return self.layer(x)


# Per-stage configuration: [in_ch, out_ch, number of residual blocks].
cif = [[32, 64, 1],
       [64, 128, 2],
       [128, 256, 8],
       [256, 512, 8],
       [512, 1024, 4]]


class Net(nn.Module):
    def __init__(self):
        super().__init__()
        self.input_layer = nn.Conv2d(3, 32, 3, 1, padding=1)
        # Build every stage from the config table, then wrap the list in an
        # nn.Sequential so the modules are registered and sliceable.
        layers = []
        for in_ch, out_ch, block_num in cif:
            layers += [self.make_layer(in_ch, out_ch, block_num)]
        self.layers = nn.Sequential(*layers)

    def make_layer(self, in_ch, out_ch, block_num):
        layers = [Downsample(in_ch, out_ch)]
        for _ in range(block_num):
            layers += [Resnet(out_ch)]
        return nn.Sequential(*layers)

    def forward(self, x):
        x = self.input_layer(x)
        # Slicing an nn.Sequential returns another (callable) nn.Sequential.
        x_52 = self.layers[0:3](x)     # stages 1-3 -> 52x52
        x_26 = self.layers[3:4](x_52)  # stage 4 -> 26x26
        x_13 = self.layers[4:5](x_26)  # stage 5 -> 13x13
        return x_52, x_26, x_13


if __name__ == '__main__':
    x = torch.randn(1, 3, 416, 416)
    net = Net()
    out = net(x)
    for i in out:
        print(i.shape)
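
The slicing trick works because nn.Sequential supports __getitem__ with a slice and returns another callable nn.Sequential. An equivalent pattern, sketched below (my alternative, not the original code), is to iterate the stages once and collect the outputs at the indices you need, which avoids hard-coding three separate slices:

def forward(self, x):
    # Drop-in alternative for Net.forward: run each stage once and keep
    # the outputs of stages 2, 3 and 4 (layer3/4/5 in the earlier naming).
    x = self.input_layer(x)
    feats = []
    for i, stage in enumerate(self.layers):
        x = stage(x)
        if i in (2, 3, 4):
            feats.append(x)
    return tuple(feats)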
4. Version 4: move the stage configuration out of the loop-based code and into a YAML file
import torch
import yaml
from torch import nn
import torch.nn.functional as F
class ConvBnLeakRelu(nn.Module):
    """Basic Darknet block: conv -> BatchNorm -> LeakyReLU."""
    def __init__(self, in_ch, out_ch, k=3, s=1):
        super().__init__()
        self.layer = nn.Sequential(
            nn.Conv2d(in_ch, out_ch, kernel_size=k, padding=k // 2, stride=s),
            nn.BatchNorm2d(out_ch),
            nn.LeakyReLU(),
        )

    def forward(self, x):
        return self.layer(x)


class Resnet(nn.Module):
    """Residual block: 1x1 bottleneck, 3x3 expansion, skip connection."""
    def __init__(self, ch):
        super().__init__()
        self.resblock = nn.Sequential(
            ConvBnLeakRelu(ch, ch // 2, k=1),
            ConvBnLeakRelu(ch // 2, ch, k=3),
        )

    def forward(self, x):
        return F.relu(self.resblock(x) + x)


class Downsample(nn.Module):
    """Stride-2 3x3 conv that halves the resolution."""
    def __init__(self, in_ch, out_ch):
        super().__init__()
        self.layer = ConvBnLeakRelu(in_ch, out_ch, k=3, s=2)

    def forward(self, x):
        return self.layer(x)
# The stage configuration now lives in tt.yaml instead of being hard-coded.
with open('tt.yaml', 'r') as file:
    cif = yaml.safe_load(file)
cif = cif['conf']
print(cif)
class Net(nn.Module):
    def __init__(self):
        super().__init__()
        self.input_layer = nn.Conv2d(3, 32, 3, 1, padding=1)
        # Build every stage from the YAML config, then wrap the list in an
        # nn.Sequential so the modules are registered and sliceable.
        layers = []
        for in_ch, out_ch, block_num in cif:
            layers += [self.make_layer(in_ch, out_ch, block_num)]
        self.layers = nn.Sequential(*layers)

    def make_layer(self, in_ch, out_ch, block_num):
        layers = [Downsample(in_ch, out_ch)]
        for _ in range(block_num):
            layers += [Resnet(out_ch)]
        return nn.Sequential(*layers)

    def forward(self, x):
        x = self.input_layer(x)
        x_52 = self.layers[0:3](x)     # stages 1-3 -> 52x52
        x_26 = self.layers[3:4](x_52)  # stage 4 -> 26x26
        x_13 = self.layers[4:5](x_26)  # stage 5 -> 13x13
        return x_52, x_26, x_13
if __name__ == '__main__':
    x = torch.randn(1, 3, 416, 416)
    net = Net()
    out = net(x)
    for i in out:
        print(i.shape)
tt.yaml
# Alternative nested-list form:
# conf: [ [ 32, 64, 1 ],
#         [ 64, 128, 2 ],
#         [ 128, 256, 8 ],
#         [ 256, 512, 8 ],
#         [ 512, 1024, 4 ] ]
conf:
  - [ 32, 64, 1 ]
  - [ 64, 128, 2 ]
  - [ 128, 256, 8 ]
  - [ 256, 512, 8 ]
  - [ 512, 1024, 4 ]
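
Because the channel table now comes from an external file, it is worth validating it right after loading; a malformed row would otherwise only fail deep inside make_layer. A minimal check might look like this (my sketch, assuming the tt.yaml above):

with open('tt.yaml', 'r') as file:
    cfg = yaml.safe_load(file)
conf = cfg['conf']
# Every row is [in_ch, out_ch, block_num], and the stages must chain:
# the out_ch of one stage is the in_ch of the next.
assert all(len(row) == 3 for row in conf)
for prev, nxt in zip(conf, conf[1:]):
    assert prev[1] == nxt[0]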
