NeuralNets.py
import torch
from torch import nn
# Adapted from class material.
# Dropout layers are excluded: overfitting is not a major issue for this
# dataset, and in the regression they caused more problems than they solved.
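# Naming convention: NN_<width of first hidden layer>_<number of hidden layers>.
# Each hidden layer halves the width of the previous one, down to 32 units,
# before a single-unit regression output.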
class NN_32_1(nn.Module):
    def __init__(self, input_size):
        super(NN_32_1, self).__init__()
        self.hidden_1 = nn.Linear(input_size, 32)
        self.output = nn.Linear(32, 1)

    def forward(self, x):
        out1 = torch.relu(self.hidden_1(x))
        return self.output(out1)
class NN_64_2(nn.Module):
    def __init__(self, input_size):
        super(NN_64_2, self).__init__()
        self.hidden_1 = nn.Linear(input_size, 64)
        self.hidden_2 = nn.Linear(64, 32)
        self.output = nn.Linear(32, 1)

    def forward(self, x):
        out1 = torch.relu(self.hidden_1(x))
        out2 = torch.relu(self.hidden_2(out1))
        return self.output(out2)
class NN_128_3(nn.Module):
    def __init__(self, input_size):
        super(NN_128_3, self).__init__()
        self.hidden_1 = nn.Linear(input_size, 128)
        self.hidden_2 = nn.Linear(128, 64)
        self.hidden_3 = nn.Linear(64, 32)
        self.output = nn.Linear(32, 1)

    def forward(self, x):
        out1 = torch.relu(self.hidden_1(x))
        out2 = torch.relu(self.hidden_2(out1))
        out3 = torch.relu(self.hidden_3(out2))
        return self.output(out3)
class NN_256_4(nn.Module):
    def __init__(self, input_size):
        super(NN_256_4, self).__init__()
        self.hidden_1 = nn.Linear(input_size, 256)
        self.hidden_2 = nn.Linear(256, 128)
        self.hidden_3 = nn.Linear(128, 64)
        self.hidden_4 = nn.Linear(64, 32)
        self.output = nn.Linear(32, 1)

    def forward(self, x):
        out1 = torch.relu(self.hidden_1(x))
        out2 = torch.relu(self.hidden_2(out1))
        out3 = torch.relu(self.hidden_3(out2))
        out4 = torch.relu(self.hidden_4(out3))
        return self.output(out4)
class NN_512_5(nn.Module):
    def __init__(self, input_size):
        super(NN_512_5, self).__init__()
        self.hidden_1 = nn.Linear(input_size, 512)
        self.hidden_2 = nn.Linear(512, 256)
        self.hidden_3 = nn.Linear(256, 128)
        self.hidden_4 = nn.Linear(128, 64)
        self.hidden_5 = nn.Linear(64, 32)
        self.output = nn.Linear(32, 1)

    def forward(self, x):
        out1 = torch.relu(self.hidden_1(x))
        out2 = torch.relu(self.hidden_2(out1))
        out3 = torch.relu(self.hidden_3(out2))
        out4 = torch.relu(self.hidden_4(out3))
        out5 = torch.relu(self.hidden_5(out4))
        return self.output(out5)
class NN_1024_6(nn.Module):
    def __init__(self, input_size):
        super(NN_1024_6, self).__init__()
        self.hidden_1 = nn.Linear(input_size, 1024)
        self.hidden_2 = nn.Linear(1024, 512)
        self.hidden_3 = nn.Linear(512, 256)
        self.hidden_4 = nn.Linear(256, 128)
        self.hidden_5 = nn.Linear(128, 64)
        self.hidden_6 = nn.Linear(64, 32)
        self.output = nn.Linear(32, 1)

    def forward(self, x):
        out1 = torch.relu(self.hidden_1(x))
        out2 = torch.relu(self.hidden_2(out1))
        out3 = torch.relu(self.hidden_3(out2))
        out4 = torch.relu(self.hidden_4(out3))
        out5 = torch.relu(self.hidden_5(out4))
        out6 = torch.relu(self.hidden_6(out5))  # final 64 -> 32 layer before the output
        return self.output(out6)
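
# Minimal usage sketch (an illustration, not part of the original file):
# instantiate one of the models and run a forward pass. The feature count of
# 10 and batch size of 4 below are arbitrary placeholder values.
if __name__ == "__main__":
    model = NN_64_2(input_size=10)
    dummy_batch = torch.randn(4, 10)    # 4 samples, 10 features each
    predictions = model(dummy_batch)    # shape (4, 1): one regression value per sample
    print(predictions.shape)            # torch.Size([4, 1])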