# cnn_mnist_class_style.py
"""Three equivalent ways to write the same MNIST CNN in PyTorch:
functional style, object style, and nn.Sequential layer style."""
import torch
import torch.nn as nn
import torch.nn.functional as F


class MyCNN_Functional(nn.Module):
    """Functional style: only parameterized layers are attributes;
    stateless ops (ReLU, pooling, dropout, softmax) are F.* calls."""

    def __init__(self):
        super(MyCNN_Functional, self).__init__()
        self.conv1 = nn.Conv2d(1, 32, 3, 1, 1)
        self.conv2 = nn.Conv2d(32, 64, 3, 1, 1)
        self.fc1 = nn.Linear(64 * 7 * 7, 512)
        self.fc2 = nn.Linear(512, 10)

    def forward(self, x):
        x = F.relu(self.conv1(x))
        x = F.max_pool2d(x, 2)                # 28x28 -> 14x14
        x = F.relu(self.conv2(x))
        x = F.max_pool2d(x, 2)                # 14x14 -> 7x7
        x = F.dropout(x, 0.2, self.training)  # active only in train mode
        x = torch.flatten(x, 1)               # (N, 64*7*7)
        x = F.relu(self.fc1(x))
        x = F.dropout(x, 0.2, self.training)
        x = F.log_softmax(self.fc2(x), dim=1)
        return x
class MyCNN_Object(nn.Module):
    """Object style: every operation, stateless or not, is an
    nn.Module attribute called in sequence in forward()."""

    def __init__(self):
        super(MyCNN_Object, self).__init__()
        self.conv1 = nn.Conv2d(1, 32, 3, 1, 1)
        self.relu1 = nn.ReLU()
        self.pool1 = nn.MaxPool2d(2)
        self.conv2 = nn.Conv2d(32, 64, 3, 1, 1)
        self.relu2 = nn.ReLU()
        self.pool2 = nn.MaxPool2d(2)
        self.drop2 = nn.Dropout(0.2)
        self.fc3 = nn.Linear(64 * 7 * 7, 512)
        self.relu3 = nn.ReLU()
        self.drop3 = nn.Dropout(0.2)
        self.fc4 = nn.Linear(512, 10)
        self.smax4 = nn.LogSoftmax(dim=1)

    def forward(self, x):
        x = self.conv1(x)
        x = self.relu1(x)
        x = self.pool1(x)
        x = self.conv2(x)
        x = self.relu2(x)
        x = self.pool2(x)
        x = self.drop2(x)
        x = torch.flatten(x, 1)
        x = self.fc3(x)
        x = self.relu3(x)
        x = self.drop3(x)
        x = self.fc4(x)
        x = self.smax4(x)
        return x
class MyCNN_Layer(nn.Module):
    """Layer style: ops are grouped into nn.Sequential blocks,
    so forward() is just a chain of layer calls."""

    def __init__(self):
        super(MyCNN_Layer, self).__init__()
        self.layer1 = nn.Sequential(
            nn.Conv2d(1, 32, 3, 1, 1),
            nn.ReLU(),
            nn.MaxPool2d(2))
        self.layer2 = nn.Sequential(
            nn.Conv2d(32, 64, 3, 1, 1),
            nn.ReLU(),
            nn.MaxPool2d(2),
            nn.Dropout(0.2))
        self.layer3 = nn.Sequential(
            nn.Linear(64 * 7 * 7, 512),
            nn.ReLU(),
            nn.Dropout(0.2))
        self.layer4 = nn.Sequential(
            nn.Linear(512, 10),
            nn.LogSoftmax(dim=1))

    def forward(self, x):
        x = self.layer1(x)
        x = self.layer2(x)
        x = torch.flatten(x, 1)  # flatten between the conv and linear blocks
        x = self.layer3(x)
        x = self.layer4(x)
        return x
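

# A minimal smoke test, not in the original file: the three classes define the
# same architecture, so an MNIST-shaped dummy batch should yield (N, 10)
# log-probabilities from each. The batch size of 4 is an arbitrary choice.
if __name__ == "__main__":
    dummy = torch.randn(4, 1, 28, 28)  # (N, C, H, W) for MNIST
    for Model in (MyCNN_Functional, MyCNN_Object, MyCNN_Layer):
        model = Model()
        model.eval()                   # disable dropout for a deterministic pass
        with torch.no_grad():
            out = model(dummy)
        print(Model.__name__, tuple(out.shape))  # -> (4, 10)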