import torch
import torch.nn as nn
from torch.nn import functional as F
from torchsummary import summary
class AlexNet(nn.Module):
    """AlexNet (Krizhevsky et al., 2012) convolutional classifier.

    Expects 227x227 RGB input (N, 3, 227, 227); with this layer
    configuration the feature map entering the classifier head is
    256 x 6 x 6. Note: conv1 uses padding=(1, 1) rather than the
    paper's 0 — the resulting spatial sizes still come out to
    55 -> 27 -> 13 -> 6 for 227x227 input.

    Args:
        num_classes: size of the output logit vector (default 1000).
    """

    def __init__(self, num_classes=1000):
        super(AlexNet, self).__init__()
        # Feature extractor. groups=2 on conv2 mirrors the original
        # two-GPU split of the 2012 architecture.
        self.conv1 = nn.Conv2d(in_channels=3, out_channels=96,
                               kernel_size=(11, 11), stride=(4, 4), padding=(1, 1))
        self.conv2 = nn.Conv2d(in_channels=96, out_channels=256,
                               kernel_size=(5, 5), groups=2, padding=(2, 2))
        self.conv3 = nn.Conv2d(in_channels=256, out_channels=384,
                               kernel_size=(3, 3), padding=(1, 1))
        self.conv4 = nn.Conv2d(in_channels=384, out_channels=384,
                               kernel_size=(3, 3), padding=(1, 1))
        self.conv5 = nn.Conv2d(in_channels=384, out_channels=256,
                               kernel_size=(3, 3), padding=(1, 1))
        # Classifier head over the flattened 256 x 6 x 6 feature map.
        self.fc1 = nn.Linear(in_features=6 * 6 * 256, out_features=4096)
        self.fc2 = nn.Linear(in_features=4096, out_features=4096)
        self.fc3 = nn.Linear(in_features=4096, out_features=num_classes)

    def forward(self, t):
        """Return raw (unbounded) class logits of shape (N, num_classes)."""
        t = F.max_pool2d(F.relu(self.conv1(t)), kernel_size=(3, 3), stride=(2, 2))
        t = F.max_pool2d(F.relu(self.conv2(t)), kernel_size=(3, 3), stride=(2, 2))
        t = F.relu(self.conv3(t))
        t = F.relu(self.conv4(t))
        t = F.max_pool2d(F.relu(self.conv5(t)), kernel_size=(3, 3), stride=(2, 2))
        # Flatten per sample; keeping t.size(0) (instead of -1 in the
        # batch slot) fails loudly on a shape mismatch rather than
        # silently re-batching.
        t = t.view(t.size(0), -1)
        t = F.relu(self.fc1(t))
        t = F.relu(self.fc2(t))
        # BUG FIX: the original applied F.relu to fc3's output, clamping
        # logits to >= 0. The final layer must emit raw logits so that
        # softmax / CrossEntropyLoss behave correctly.
        t = self.fc3(t)
        return t
if __name__ == '__main__':
    # Build the model and print a layer-by-layer summary for AlexNet's
    # canonical 227x227 input size.
    alex_net = AlexNet()
    # ROBUSTNESS FIX: the original called .cuda() unconditionally, which
    # raises on CPU-only machines. Select the device dynamically and pass
    # it through to torchsummary (whose device argument defaults to "cuda").
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    alex_net = alex_net.to(device)
    summary(alex_net, (3, 227, 227), device=device.type)