forked from nicklashansen/adaptive-learning-rate-schedule
lenet.py
import torch.nn as nn


class LeNet5(nn.Module):
    """
    Implementation of LeNet-5 as described in http://yann.lecun.com/exdb/publis/pdf/lecun-01a.pdf.
    """

    def __init__(self, num_channels_in=3, num_classes=10, img_dims=(32, 32)):
        super(LeNet5, self).__init__()
        # LeNet-5 expects 32x32 inputs; 28x28 inputs (e.g. MNIST) are padded back up
        # to 32x32 by the first convolution.
        assert img_dims in ((32, 32), (28, 28)), 'img_dims must be (32, 32) or (28, 28)'
        self.conv = nn.Sequential(
            # C1: 6 feature maps; 28x28 inputs are padded so the output is 28x28 either way
            nn.Conv2d(num_channels_in, 6, kernel_size=(5, 5), padding=(0, 0) if img_dims == (32, 32) else (2, 2)),
            nn.ReLU(),
            # S2: 2x2 subsampling -> 14x14
            nn.MaxPool2d(kernel_size=(2, 2), stride=2),
            # C3: 16 feature maps -> 10x10
            nn.Conv2d(6, 16, kernel_size=(5, 5)),
            nn.ReLU(),
            # S4: 2x2 subsampling -> 5x5
            nn.MaxPool2d(kernel_size=(2, 2), stride=2),
            # C5: 120 feature maps of size 1x1
            nn.Conv2d(16, 120, kernel_size=(5, 5)),
            nn.ReLU()
        )
        self.fc = nn.Sequential(
            # F6: 120 -> 84, followed by the output layer
            nn.Linear(120, 84),
            nn.ReLU(),
            nn.Linear(84, num_classes)
        )

    def forward(self, x):
        x = self.conv(x)
        x = x.view(x.size(0), -1)  # flatten the 120x1x1 feature maps to a 120-dim vector
        return self.fc(x)
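
For reference, a minimal usage sketch (not part of the original file): it assumes torch is installed and that this module is importable as lenet, and simply checks the output shape for the two supported input sizes.

import torch

from lenet import LeNet5

# CIFAR-10-style input: 3 channels, 32x32
cifar_model = LeNet5(num_channels_in=3, num_classes=10, img_dims=(32, 32))
cifar_batch = torch.randn(4, 3, 32, 32)
print(cifar_model(cifar_batch).shape)  # torch.Size([4, 10])

# MNIST-style input: 1 channel, 28x28 (padded to 32x32 by the first convolution)
mnist_model = LeNet5(num_channels_in=1, num_classes=10, img_dims=(28, 28))
mnist_batch = torch.randn(4, 1, 28, 28)
print(mnist_model(mnist_batch).shape)  # torch.Size([4, 10])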