-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathnn.py
More file actions
executable file
·107 lines (78 loc) · 2.18 KB
/
nn.py
File metadata and controls
executable file
·107 lines (78 loc) · 2.18 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
import numpy as np
from math import exp
class Layer():
    """Interface every neural-network layer must implement.

    FIX: the original abstract methods were bare ``pass``, so calling an
    un-overridden method silently returned None and the failure surfaced
    far from its cause. They now raise NotImplementedError immediately.
    """
    def __init__(self):
        pass
    def forward(self, input):
        """Compute this layer's output for `input`."""
        raise NotImplementedError
    def isWeighted(self):
        """Return True if the layer has trainable parameters."""
        raise NotImplementedError
    def getDimensions(self):
        """Return the (input, output) dimensions of the layer."""
        raise NotImplementedError
class Dense(Layer):
    """Fully connected (affine) layer: output = input @ weights + bias."""
    def __init__(self, input_units, output_units):
        self.input_units, self.output_units = input_units, output_units
        # Standard-normal initial weights; bias kept as a (1, out) row so it
        # broadcasts over a batch dimension in forward().
        self.weights = np.random.randn(input_units, output_units)
        self.bias = np.zeros((1, output_units))
    def forward(self, input):
        # input is expected as (batch, input_units) -> (batch, output_units).
        return np.dot(input, self.weights) + self.bias
    def isWeighted(self):
        # Dense layers carry trainable parameters.
        return True
    def getDimensions(self):
        # BUG FIX: the original returned bare `input_units, output_units`,
        # which raised NameError at call time; read the instance attributes.
        return self.input_units, self.output_units
    def getWeights(self):
        return self.weights
    def setWeights(self, weights):
        # NOTE: assert is stripped under `python -O`; kept as-is to preserve
        # the AssertionError contract existing callers may rely on.
        assert(weights.shape == self.weights.shape)
        self.weights = weights
    def getBias(self):
        return self.bias
    def setBias(self, bias):
        assert(bias.shape == self.bias.shape)
        self.bias = bias
class ReLU(Layer):
    """Rectified linear unit activation: max(0, x) applied elementwise."""
    def __init__(self):
        pass
    def forward(self, input):
        # Zero out negative entries; positives pass through unchanged.
        return np.maximum(input, 0)
    def isWeighted(self):
        # Activation layers have no trainable parameters.
        return False
    def getDimensions(self):
        # Shape-preserving activation; dimensions are placeholders.
        return 1, 1
class Sigmoid(Layer):
    """Logistic sigmoid activation: 1 / (1 + e^(-x)), elementwise."""
    def __init__(self):
        pass
    def forward(self, input):
        # Elementwise logistic function; every output lies in (0, 1).
        negated_exp = np.exp(-input)
        return 1.0 / (1.0 + negated_exp)
    def isWeighted(self):
        # Activation layers have no trainable parameters.
        return False
    def getDimensions(self):
        # Shape-preserving activation; dimensions are placeholders.
        return 1, 1
class Softmax(Layer):
    """Softmax activation unit: maps a score vector to a probability
    distribution (non-negative entries summing to 1).

    FIXES vs the original:
      * the class comment wrongly said "Sigmoid";
      * `input /= np.max(np.abs(input))` mutated the caller's array in
        place AND changed the result (softmax is not scale-invariant),
        and produced nan (0/0) for an all-zero input;
      * the `sum == 0` branch *added* 1e9 to the sum rather than
        guarding against it;
      * locals shadowed the builtins `exp` and `sum`.
    """
    def __init__(self):
        pass
    def forward(self, input):
        # Standard numerically-stable softmax: shift scores so the largest
        # exponent is exp(0) == 1. The denominator is therefore >= 1, so no
        # division-by-zero guard is needed, and the input is never mutated.
        shifted = np.exp(input - np.max(input))
        return shifted / np.sum(shifted)
    def isWeighted(self):
        # Activation layers have no trainable parameters.
        return False
    def getDimensions(self):
        # Shape-preserving activation; dimensions are placeholders.
        return 1, 1
class NeuralNetwork():
    """Sequential container of layers, evaluated in insertion order."""
    def __init__(self):
        # Ordered pipeline of Layer instances.
        self.layers = []
    def calculate(self, input):
        """Feed `input` through every layer in order and return the result."""
        output = input
        for stage in self.layers:
            output = stage.forward(output)
        return output
    def addLayer(self, layer):
        """Append `layer` to the end of the pipeline."""
        self.layers.append(layer)