# Implemented with reference to the architecture diagram at
# https://inha-kim.tistory.com/47 and the reference implementation at
# https://github.com/weiaicunzai/pytorch-cifar100/blob/master/models/resnet.py
# (line-number gutter left over from the blog code-highlighter paste removed)
import torch
import torch.nn as nn
from torchsummary import summary
class BasicBlock(nn.Module):
    """Two-conv residual block used by ResNet-18/34 (no channel expansion)."""

    # Ratio between a block's output channels and its `out_channels` argument;
    # BasicBlock keeps them equal (BottleNeck uses 4).
    dimension_expansion = 1

    def __init__(self, in_channels, out_channels, stride=1):
        """Build the block; `stride=2` halves the spatial size, `stride=1` keeps it."""
        super().__init__()
        expanded = out_channels * self.dimension_expansion

        # Main path: 3x3 conv (possibly strided) -> BN -> ReLU -> 3x3 conv -> BN.
        self.residual_function = nn.Sequential(
            nn.Conv2d(in_channels, out_channels, kernel_size=3, stride=stride, padding=1, bias=False),
            nn.BatchNorm2d(out_channels),
            nn.ReLU(inplace=True),
            nn.Conv2d(out_channels, expanded, kernel_size=3, stride=1, padding=1, bias=False),
            nn.BatchNorm2d(expanded),
        )

        # Identity shortcut when shapes already match; otherwise a strided
        # 1x1 projection brings the input to the residual path's shape.
        if stride == 1 and in_channels == expanded:
            self.shortcut = nn.Sequential()
        else:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_channels, expanded, kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(expanded),
            )

        self.relu = nn.ReLU(inplace=True)

    def forward(self, x):
        """Sum the residual and shortcut paths, then apply the final ReLU."""
        return self.relu(self.residual_function(x) + self.shortcut(x))
class BottleNeck(nn.Module):
    """Three-conv bottleneck residual block used by ResNet-50/101/152.

    The 1x1 -> 3x3 -> 1x1 stack shrinks to `out_channels`, convolves, then
    expands to `out_channels * dimension_expansion` (4x).
    (Bug fix: a stray 'ㄱ' character before the first comment made this class
    a SyntaxError; it has been removed.)
    """

    # Output channels are 4x the `out_channels` argument.
    dimension_expansion = 4

    def __init__(self, in_channels, out_channels, stride=1):
        """Build the block; `stride=2` halves the spatial size in the 3x3 conv."""
        super().__init__()
        expanded = out_channels * self.dimension_expansion

        # Main path: 1x1 reduce -> 3x3 (possibly strided) -> 1x1 expand, BN after each.
        self.residual_function = nn.Sequential(
            nn.Conv2d(in_channels, out_channels, kernel_size=1, stride=1, bias=False),
            nn.BatchNorm2d(out_channels),
            nn.ReLU(inplace=True),
            nn.Conv2d(out_channels, out_channels, kernel_size=3, stride=stride, padding=1, bias=False),
            nn.BatchNorm2d(out_channels),
            nn.ReLU(inplace=True),
            nn.Conv2d(out_channels, expanded, kernel_size=1, stride=1, bias=False),
            nn.BatchNorm2d(expanded),
        )

        # Identity shortcut when shapes already match; otherwise a strided
        # 1x1 projection matches the residual path's shape.
        self.shortcut = nn.Sequential()
        if stride != 1 or in_channels != expanded:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_channels, expanded, kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(expanded),
            )

        self.relu = nn.ReLU(inplace=True)

    def forward(self, x):
        """Sum the residual and shortcut paths, then apply the final ReLU."""
        return self.relu(self.residual_function(x) + self.shortcut(x))
class ResNet(nn.Module):
# ResNet class
def __init__(self,block_type,num_blocks,num_classes=10):
super().__init__()
self.in_channels = 64
self.conv1 = nn.Sequential(
nn.Conv2d(3,64,kernel_size = 7,stride = 2,padding = 3,bias=False),
nn.BatchNorm2d(64),
nn.ReLU(inplace=True),
nn.MaxPool2d(kernel_size=3,stride=2,padding=1)
)
# maxpool2d가 사실 conv2_x에 속해서 size를 반으로 줄임,그러므로 stride=1로 전달
self.conv2_x = self.make_layer(block_type,64,num_blocks[0],1)
self.conv3_x = self.make_layer(block_type,128,num_blocks[1],2)
self.conv4_x = self.make_layer(block_type,256,num_blocks[2],2)
self.conv5_x = self.make_layer(block_type,512,num_blocks[3],2)
self.avg_pool = nn.AdaptiveAvgPool2d((1,1))
self.fc = nn.Linear(512 * block_type.dimension_expansion ,num_classes)
def make_layer(self,block_type,out_channels,num_block,stride):
# stride가 2 일 경우 list 맨 앞만 2, 나머진 1
strides = [stride] + [1] * (num_block-1)
layers = []
for stride in strides:
layers.append(block_type(self.in_channels,out_channels,stride))
self.in_channels = out_channels * block_type.dimension_expansion
return nn.Sequential(*layers)
def forward(self,x):
out = self.conv1(x)
out = self.conv2_x(out)
out = self.conv3_x(out)
out = self.conv4_x(out)
out = self.conv5_x(out)
out = self.avg_pool(out)
out = out.view(out.size(0),-1)
out = self.fc(out)
return out
def resnet18():
    """Construct a ResNet-18 (BasicBlock, stage depths [2, 2, 2, 2])."""
    return ResNet(BasicBlock, [2, 2, 2, 2])
def resnet34():
    """Construct a ResNet-34 (BasicBlock, stage depths [3, 4, 6, 3])."""
    return ResNet(BasicBlock, [3, 4, 6, 3])
def resnet50():
    """Construct a ResNet-50 (BottleNeck, stage depths [3, 4, 6, 3])."""
    return ResNet(BottleNeck, [3, 4, 6, 3])
def resnet101():
    """Construct a ResNet-101 (BottleNeck, stage depths [3, 4, 23, 3])."""
    return ResNet(BottleNeck, [3, 4, 23, 3])
def resnet152():
    """Construct a ResNet-152 (BottleNeck, stage depths [3, 8, 36, 3])."""
    return ResNet(BottleNeck, [3, 8, 36, 3])
def _main():
    """Build a ResNet-50 on the best available device and print its summary.

    Fix: the original ran these statements at module import time (CUDA probe,
    model construction, printing); guarding with `__main__` keeps the module
    importable without side effects while preserving script behavior.
    """
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    print(device)
    model = resnet50().to(device)
    # torchsummary expects the device as a string ('cuda'/'cpu'), hence `.type`.
    summary(model, (3, 224, 224), device=device.type)


if __name__ == "__main__":
    _main()
# (trailing code-highlighter paste artifacts removed)