-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathl1-factorial-sigmoid.py
More file actions
97 lines (68 loc) · 2.21 KB
/
l1-factorial-sigmoid.py
File metadata and controls
97 lines (68 loc) · 2.21 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
import math
import random
# ---------- Activation functions ----------
def sigmoid(x):
    """Numerically stable logistic sigmoid: 1 / (1 + e^-x).

    The naive form ``1 / (1 + math.exp(-x))`` raises OverflowError for
    large negative x (roughly x < -745, where exp(-x) exceeds the float
    range). Branching on the sign keeps the exponent non-positive, so
    exp() can at worst underflow to 0.0.
    """
    if x >= 0:
        return 1.0 / (1.0 + math.exp(-x))
    # For x < 0, rewrite as e^x / (1 + e^x); exp(x) <= 1 here, no overflow.
    z = math.exp(x)
    return z / (1.0 + z)
def sigmoid_derivative(y):
    """Derivative of the sigmoid, expressed via its output y = sigmoid(x)."""
    complement = 1 - y
    return y * complement
# ---------- Neural network ----------
class FactorialNN:
    """A 1-input, 1-output network with one sigmoid hidden layer,
    trained by per-sample gradient descent (plain SGD)."""

    def __init__(self, hidden_size=10):
        self.hidden_size = hidden_size
        # Input -> hidden: one scalar weight and bias per hidden unit.
        self.w1 = [random.uniform(-1, 1) for _ in range(hidden_size)]
        self.b1 = [random.uniform(-1, 1) for _ in range(hidden_size)]
        # Hidden -> output: one weight per hidden unit plus a single bias.
        self.w2 = [random.uniform(-1, 1) for _ in range(hidden_size)]
        self.b2 = random.uniform(-1, 1)

    def forward(self, x):
        """Return (hidden activations, linear output) for scalar input x."""
        # Hidden layer: sigmoid of an affine map of the input.
        h = [sigmoid(w * x + b) for w, b in zip(self.w1, self.b1)]
        # Output layer is linear: weighted sum of activations plus bias.
        y = sum(w * a for w, a in zip(self.w2, h)) + self.b2
        return h, y

    def train(self, x, target, lr=0.01):
        """Run one SGD step on a single (x, target) pair; return squared error."""
        h, y = self.forward(x)
        error = y - target
        # Linear output unit, so the output-layer gradient factor is the error itself.
        dy = error
        # Backpropagate through each hidden unit.
        dh = [dy * w * sigmoid_derivative(a) for w, a in zip(self.w2, h)]
        # Update hidden -> output parameters.
        for i, a in enumerate(h):
            self.w2[i] -= lr * dy * a
        self.b2 -= lr * dy
        # Update input -> hidden parameters.
        for i, g in enumerate(dh):
            self.w1[i] -= lr * g * x
            self.b1[i] -= lr * g
        return error ** 2
# ---------- Training data ----------
MAX_N = 10


def log_factorial(n):
    """Return ln(n!), computed exactly from the integer factorial."""
    fact = math.factorial(n)
    return math.log(fact)
# Training pairs: input is n scaled into (0, 1], target is ln(n!).
training_data = [(n / MAX_N, log_factorial(n)) for n in range(1, MAX_N + 1)]

nn = FactorialNN(hidden_size=10)
# ---------- Training ----------
for epoch in range(20000):
    # Total squared error over the whole dataset for this epoch.
    loss = sum(nn.train(x, y) for x, y in training_data)
    # Report progress every 2000 epochs.
    if epoch % 2000 == 0:
        print(f"Epoch {epoch}, Loss: {loss:.4f}")
# ---------- Testing ----------
print("\nPrediction:")
for n in range(1, MAX_N + 1):
    # Predict ln(n!) for the scaled input, then exponentiate back.
    _, y_pred = nn.forward(n / MAX_N)
    fact_pred = math.exp(y_pred)
    print(f"{n}! ≈ {fact_pred:.1f} (true: {math.factorial(n)})")