import torch
from torch import nn


class AlexNet(nn.Module):
    def __init__(self):
        super(AlexNet, self).__init__()
        '''
        Build the convolutional part here. Define, in order:
        conv, ReLU, max pool,
        conv, ReLU, max pool,
        conv, ReLU, conv, ReLU, conv, ReLU, max pool.
        See the test description for the exact shapes.
        '''
        self.conv = nn.Sequential(
            nn.Conv2d(1, 96, kernel_size=(11, 11), stride=(4, 4)),
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=3, stride=2),
            nn.Conv2d(96, 256, kernel_size=(5, 5), stride=(1, 1), padding=(2, 2)),
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=3, stride=2),
            nn.Conv2d(256, 384, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)),
            nn.ReLU(),
            nn.Conv2d(384, 384, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)),
            nn.ReLU(),
            nn.Conv2d(384, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1)),
            nn.ReLU(),
            nn.MaxPool2d(kernel_size=3, stride=2),
        )
        '''
        Build the fully connected part here. Define, in order:
        linear, ReLU, dropout,
        linear, ReLU, dropout, linear.
        See the test description for the exact shapes.
        '''
        self.fc = nn.Sequential(
            nn.Linear(in_features=6400, out_features=4096, bias=True),
            nn.ReLU(),
            nn.Dropout(p=0.5),
            nn.Linear(in_features=4096, out_features=4096, bias=True),
            nn.ReLU(),
            nn.Dropout(p=0.5),
            nn.Linear(in_features=4096, out_features=10, bias=True),
        )

    def forward(self, img):
        '''
        Define the forward pass here.
        '''
        feature = self.conv(img)
        # Flatten the (batch, 256, 5, 5) feature map to (batch, 6400)
        # before the fully connected layers.
        output = self.fc(feature.view(img.shape[0], -1))
        return output
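A quick shape sanity check (a minimal sketch; the 1x224x224 input size is not stated in the exercise text but is the assumption that makes the conv stack end at 256 x 5 x 5 = 6400 features, matching what self.fc expects):

if __name__ == '__main__':
    net = AlexNet()
    # One dummy single-channel 224x224 image; 224x224 is an assumed
    # input size inferred from the 6400-feature flatten above.
    x = torch.rand(1, 1, 224, 224)
    out = net(x)
    print(out.shape)  # expected: torch.Size([1, 10])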