forked from YashaPushak/PredictingPRNGs
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathgradDesc.m
More file actions
40 lines (31 loc) · 835 Bytes
/
gradDesc.m
File metadata and controls
40 lines (31 loc) · 835 Bytes
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
function [w,f] = gradDesc(funObj,w,maxEvals,varargin)
% GRADDESC  Find a local minimizer of a differentiable function.
%
%   [w,f] = gradDesc(funObj,w,maxEvals,...) runs gradient descent with
%   Barzilai-Borwein (spectral) step sizes.
%
%   Inputs:
%     funObj   - function handle; [f,g] = funObj(w,...) returns the
%                objective value f and gradient g at w
%     w        - initial parameter vector
%     maxEvals - maximum number of function evaluations
%     varargin - extra arguments forwarded unchanged to funObj
%   Outputs:
%     w - final iterate
%     f - objective value at the final iterate
%
%   The first iteration takes a plain gradient step with a fixed step
%   size; subsequent iterations use the BB step alpha = (s'*y)/(y'*y).
%   Iteration stops when norm(g,inf) < 0.1 or the evaluation budget is
%   exhausted.

% Default step size; also the fallback when the BB step is unusable
alpha0 = 1e-2;
alpha = alpha0;
% Evaluate initial function and gradient
[f,g] = funObj(w,varargin{:});
funEvals = 1;
while funEvals < maxEvals
    wold = w;
    if funEvals > 1
        % Barzilai-Borwein step: alpha = (s'*y)/(y'*y).
        % Guard the division: y'*y can be zero (gradient unchanged), and
        % s'*y can be negative (negative curvature) or produce NaN/Inf.
        % An unguarded degenerate step corrupts every later iterate, so
        % fall back to the default step size in those cases.
        denom = y'*y;
        if denom > 0
            alpha = (s'*y)/denom;
        else
            alpha = alpha0;
        end
        if ~isfinite(alpha) || alpha <= 0
            alpha = alpha0;
        end
    end
    w = w - alpha*g;
    gold = g;
    [f,g] = funObj(w,varargin{:});
    funEvals = funEvals + 1;
    % BB quantities: iterate displacement s and gradient change y
    s = w - wold;
    y = g - gold;
    % Print out how we are doing
    optCond = norm(g,'inf');
    fprintf('%6d %15.5e %15.5e %15.5e\n',funEvals,alpha,f,optCond);
    if optCond < 0.1
        fprintf('Solution found with optCond < 0.1\n');
        break;
    end
end