-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path0_gradient_descent.py
More file actions
60 lines (48 loc) · 1.84 KB
/
0_gradient_descent.py
File metadata and controls
60 lines (48 loc) · 1.84 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
import numpy as np
def compute_error_for_line_given_points(b, w, points):
    """Return the mean squared error of the line y = w*x + b over all points.

    Args:
        b: y-intercept of the candidate line.
        w: slope of the candidate line.
        points: array-like of shape (n, 2); column 0 is x, column 1 is y.

    Returns:
        Mean of the squared residuals (y - (w*x + b)) across the n points.
    """
    points = np.asarray(points)
    x = points[:, 0]
    y = points[:, 1]
    # Vectorized MSE: one array expression instead of a Python-level loop.
    return float(np.mean((y - (w * x + b)) ** 2))
def step_gradient(b_current, w_current, points, learning_rate):
    """Perform one gradient-descent step on the MSE loss of y = w*x + b.

    Args:
        b_current: current y-intercept.
        w_current: current slope.
        points: array-like of shape (n, 2); column 0 is x, column 1 is y.
        learning_rate: step size multiplying each gradient component.

    Returns:
        [new_b, new_w] after moving against the gradient once.
    """
    points = np.asarray(points)
    n = float(len(points))
    x = points[:, 0]
    y = points[:, 1]
    # residual_i = prediction_i - y_i; the MSE gradients are
    #   dL/db = (2/n) * sum(residual)
    #   dL/dw = (2/n) * sum(x * residual)
    # Vectorized sums replace the original per-point accumulation loop.
    residual = (w_current * x + b_current) - y
    b_gradient = (2 / n) * np.sum(residual)
    w_gradient = (2 / n) * np.sum(x * residual)
    new_b = b_current - (learning_rate * b_gradient)
    new_w = w_current - (learning_rate * w_gradient)
    return [new_b, new_w]
def gradient_descent_runner(points, starting_b, starting_w, learning_rate, num_iterations):
    """Run gradient descent for num_iterations steps from (starting_b, starting_w).

    Args:
        points: array-like of shape (n, 2); column 0 is x, column 1 is y.
        starting_b: initial y-intercept.
        starting_w: initial slope.
        learning_rate: step size passed to each step_gradient call.
        num_iterations: number of update steps to perform.

    Returns:
        [b, w] — the fitted intercept and slope after all iterations.
    """
    # Convert once up front; the original rebuilt the array on every iteration.
    points = np.array(points)
    b = starting_b
    w = starting_w
    for _ in range(num_iterations):
        b, w = step_gradient(b, w, points, learning_rate)
    return [b, w]
def run():
    """Fit a line to data/data.csv with gradient descent and print progress."""
    # Each CSV row is one (x, y) sample.
    points = np.genfromtxt("data/data.csv", delimiter=",")

    # Hyperparameters and starting guesses for the line y = w*x + b.
    learning_rate = 0.0001
    initial_b = 0  # initial y-intercept guess
    initial_w = 0  # initial slope guess
    num_iterations = 1000

    start_error = compute_error_for_line_given_points(initial_b, initial_w, points)
    print("Starting gradient descent at b = {0}, w = {1}, error = {2}".format(
        initial_b, initial_w, start_error))
    print("Running...")

    b, w = gradient_descent_runner(points, initial_b, initial_w, learning_rate, num_iterations)
    final_error = compute_error_for_line_given_points(b, w, points)
    print("After {0} iterations b = {1}, w = {2}, error = {3}".format(
        num_iterations, b, w, final_error))
if __name__ == '__main__':
    # Execute the demo only when run as a script, not when imported.
    run()