-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathhw5_8-9.py
More file actions
74 lines (59 loc) · 1.58 KB
/
hw5_8-9.py
File metadata and controls
74 lines (59 loc) · 1.58 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
import numpy as np
from numpy import exp, log
#
def logistic(z):
    """Logistic sigmoid 1 / (1 + e^(-z)); maps any real z into (0, 1)."""
    denom = 1 + exp(-z)
    return 1 / denom
a = logistic  # short alias: the activation used by the hypothesis g below
#
def d_logistic(z):
    """Derivative of the logistic sigmoid, via its value s: s * (1 - s)."""
    s = logistic(z)  # evaluate once instead of twice
    return s * (1 - s)
d_a = d_logistic  # alias matching the a = logistic convention
#
def cross_entropy(y_, y):
    """Mean binary cross-entropy of predictions y in (0, 1) against targets y_."""
    per_sample = y_ * log(y) + (1 - y_) * log(1 - y)
    return np.mean(-per_sample)
E = cross_entropy  # error measure used for Ein / Eout in the experiment below
#
def d_cross_entropy(y_, x, w):
    """Gradient of the cross-entropy loss for one sample.

    For the logistic model y = sigmoid(w . x), the gradient of the
    cross-entropy with respect to w reduces to (y - y_) * x.

    y_: target label for the sample (0 or 1)
    x:  1-D feature vector (bias component included)
    w:  1-D weight vector
    """
    # BUG FIX: g is declared as g(w, X); the original called g(x, w).
    # That only worked by accident because np.dot is symmetric for two
    # 1-D vectors — with a 2-D X the swapped order would be wrong.
    y = g(w, x)
    return (y - y_) * x
d_E = d_cross_entropy
def g(w, X):
    """Hypothesis: logistic activation of the linear signal X . w."""
    signal = np.dot(X, w)
    return a(signal)
# Experiment: RUN independent trials of logistic-regression SGD on N
# training points; Eout is estimated on Nout fresh points per trial.
RUN = 100
N = 100
Nout = 1000
LR = 0.01  # learning rate for the SGD updates
Eout = np.empty(RUN)    # out-of-sample cross-entropy, one entry per run
Epochs = np.empty(RUN)  # epochs until convergence, one entry per run
for run in range(RUN):
    # Build the target f as a random line y = m*x + b through two
    # uniform points in [-1, 1]^2.  (Assumes f_p1_x != f_p2_x; a
    # vertical line has probability zero under this draw.)
    [f_p1_x, f_p1_y], [f_p2_x, f_p2_y] = np.random.random((2, 2)) * 2 - 1
    m = (f_p1_y - f_p2_y) / (f_p1_x - f_p2_x)
    b = f_p1_y - m * f_p1_x
    print("m, b of f: m:%f, b:%f" % (m, b))
    def f(X):
        """Target: label 1 iff the point lies on/above the line m*x1 + b.

        Rows of X are [bias, x1, x2]; column 0 (the bias) is ignored.
        """
        X = np.atleast_2d(X)
        # BUG FIX: np.int was deprecated in NumPy 1.20 and removed in
        # 1.24; the builtin int is the documented replacement and is
        # what np.int aliased on older versions.
        return np.asarray((m * X[:, 1] + b <= X[:, 2]), dtype=int)
    # Training and evaluation inputs, each row [1, x1, x2].
    # NOTE(review): inputs are drawn from [0, 1]^2 while the target line
    # comes from [-1, 1]^2 points — looks inconsistent; confirm intent.
    X = np.column_stack((np.ones(N), np.random.random((N, 2))))
    Xout = np.column_stack((np.ones(Nout), np.random.random((Nout, 2))))
    #
    w = np.zeros(3)
    epoch = 0
    while True:
        epoch += 1
        ns = np.random.permutation(N)  # fresh sample order each epoch
        w_prev_epoch = w.copy()
        for i in range(N):
            x = X[ns[i]]
            y_ = f(x)
            w -= LR * d_E(y_, x, w)  # one SGD step on a single sample
        # Converged when the weight vector moved < 0.01 over the epoch.
        if np.sqrt(np.sum((w - w_prev_epoch)**2)) < 0.01:
            Epochs[run] = epoch
            Eout[run] = E(f(Xout), g(w, Xout))
            print('run:', run, 'epoch:', epoch, "w:", w, "Ein:", E(f(X), g(w, X)), "Eout:", E(f(Xout), g(w, Xout)))
            break
# problem 8, 9: average Eout and average epoch count over all runs
print(Eout.mean())
print(Epochs.mean())