-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathnumerical_gradient.R
More file actions
70 lines (66 loc) · 1.79 KB
/
numerical_gradient.R
File metadata and controls
70 lines (66 loc) · 1.79 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
# Numerical differentiation functions used to implement the 2-layer neural network.
# An explanation of this code is available at https://choosunsick.github.io/post/neural_network_5/ .
# Central-difference numerical gradient of f with respect to the global
# weight matrix W1.
#
# f: loss function of the form f(x, t); it must read W1 from the global
#    environment (the perturbation below is applied via `<<-`).
# x: input forwarded to f unchanged.
# t: target/label forwarded to f unchanged.
#
# Returns an object with the same shape as W1 holding the elementwise
# estimate of d f / d W1.
#
# NOTE(review): this function deliberately mutates the global W1 in place
# and restores every entry afterwards — the surrounding tutorial code
# depends on that design, so the side effect is kept.
numerical_gradient_W1 <- function(f, x, t) {
  h <- 1e-4
  # Preallocate the result with W1's shape; copy-and-zero works for both
  # matrices and plain vectors (unlike matrix(0, nrow(W1), ncol(W1))).
  vec <- W1
  vec[] <- 0
  for (i in seq_along(W1)) {
    origin <- W1[i]
    W1[i] <<- origin + h  # perturb upward from the saved value
    fxh1 <- f(x, t)
    # Perturb downward from the saved value rather than subtracting 2*h
    # from the already-perturbed entry, avoiding accumulated float error.
    W1[i] <<- origin - h
    fxh2 <- f(x, t)
    vec[i] <- (fxh1 - fxh2) / (2 * h)
    W1[i] <<- origin  # restore the original entry exactly
  }
  vec
}
# Central-difference numerical gradient of f with respect to the global
# weight matrix W2.
#
# f: loss function of the form f(x, t); it must read W2 from the global
#    environment (the perturbation below is applied via `<<-`).
# x: input forwarded to f unchanged.
# t: target/label forwarded to f unchanged.
#
# Returns an object with the same shape as W2 holding the elementwise
# estimate of d f / d W2.
#
# NOTE(review): mutates the global W2 in place and restores every entry
# afterwards — the tutorial's design, kept intentionally.
numerical_gradient_W2 <- function(f, x, t) {
  h <- 1e-4
  # Copy-and-zero preallocation keeps W2's shape for matrix or vector.
  vec <- W2
  vec[] <- 0
  for (i in seq_along(W2)) {
    origin <- W2[i]
    W2[i] <<- origin + h  # perturb upward from the saved value
    fxh1 <- f(x, t)
    # Perturb downward from the saved value to avoid compounding
    # floating-point error from in-place updates.
    W2[i] <<- origin - h
    fxh2 <- f(x, t)
    vec[i] <- (fxh1 - fxh2) / (2 * h)
    W2[i] <<- origin  # restore the original entry exactly
  }
  vec
}
# Central-difference numerical gradient of f with respect to the global
# bias b1.
#
# f: loss function of the form f(x, t); it must read b1 from the global
#    environment (the perturbation below is applied via `<<-`).
# x: input forwarded to f unchanged.
# t: target/label forwarded to f unchanged.
#
# Returns an object with the same shape as b1 holding the elementwise
# estimate of d f / d b1. Unlike the original matrix(0, nrow(b1), ...)
# preallocation, this also works when b1 is a plain numeric vector
# (where nrow() would be NULL and matrix() would error).
#
# NOTE(review): mutates the global b1 in place and restores every entry
# afterwards — the tutorial's design, kept intentionally.
numerical_gradient_b1 <- function(f, x, t) {
  h <- 1e-4
  # Copy-and-zero preallocation keeps b1's shape for matrix or vector.
  vec <- b1
  vec[] <- 0
  for (i in seq_along(b1)) {
    origin <- b1[i]
    b1[i] <<- origin + h  # perturb upward from the saved value
    fxh1 <- f(x, t)
    # Perturb downward from the saved value to avoid compounding
    # floating-point error from in-place updates.
    b1[i] <<- origin - h
    fxh2 <- f(x, t)
    vec[i] <- (fxh1 - fxh2) / (2 * h)
    b1[i] <<- origin  # restore the original entry exactly
  }
  vec
}
# Central-difference numerical gradient of f with respect to the global
# bias b2.
#
# f: loss function of the form f(x, t); it must read b2 from the global
#    environment (the perturbation below is applied via `<<-`).
# x: input forwarded to f unchanged.
# t: target/label forwarded to f unchanged.
#
# Returns an object with the same shape as b2 holding the elementwise
# estimate of d f / d b2. The copy-and-zero preallocation also works
# when b2 is a plain numeric vector (where nrow() would be NULL).
#
# NOTE(review): mutates the global b2 in place and restores every entry
# afterwards — the tutorial's design, kept intentionally.
numerical_gradient_b2 <- function(f, x, t) {
  h <- 1e-4
  # Copy-and-zero preallocation keeps b2's shape for matrix or vector.
  vec <- b2
  vec[] <- 0
  for (i in seq_along(b2)) {
    origin <- b2[i]
    b2[i] <<- origin + h  # perturb upward from the saved value
    fxh1 <- f(x, t)
    # Perturb downward from the saved value to avoid compounding
    # floating-point error from in-place updates.
    b2[i] <<- origin - h
    fxh2 <- f(x, t)
    vec[i] <- (fxh1 - fxh2) / (2 * h)
    b2[i] <<- origin  # restore the original entry exactly
  }
  vec
}
# Collect the numerical gradients for every parameter of the 2-layer
# network into a single named list (W1, b1, W2, b2 — in that order, the
# order the training loop consumes them).
numerical_gradient <- function(f, x, t) {
  helpers <- list(
    W1 = numerical_gradient_W1,
    b1 = numerical_gradient_b1,
    W2 = numerical_gradient_W2,
    b2 = numerical_gradient_b2
  )
  # lapply preserves the names and order of `helpers`, so the result is
  # the same named list the original hand-built version returned.
  lapply(helpers, function(grad_fn) grad_fn(f, x, t))
}
numerical_gradient <- compiler::cmpfun(numerical_gradient)