
combinedDeepLearningActiveContour/minFunc/example_minFunc.m
% Runs various limited-memory solvers on the 2D Rosenbrock function for 25
% function evaluations
maxFunEvals = 25;

fprintf('Result after %d evaluations of limited-memory solvers on 2D rosenbrock:\n',maxFunEvals);

fprintf('---------------------------------------\n');
fprintf('x1 = %.4f, x2 = %.4f (starting point)\n',0,0);
fprintf('x1 = %.4f, x2 = %.4f (optimal solution)\n',1,1);
fprintf('---------------------------------------\n');

if exist('minimize') == 2
    % minimize.m - conjugate gradient method
    x = minimize([0 0]', 'rosenbrock', -maxFunEvals);
    fprintf('x1 = %.4f, x2 = %.4f (minimize.m by C. Rasmussen)\n',x(1),x(2));
end

options = [];
options.display = 'none';
options.maxFunEvals = maxFunEvals;

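% Note: minFunc takes the objective as a function handle returning the function
% value and gradient, i.e. [f,g] = rosenbrock(x); a minimal sketch of a
% compatible rosenbrock.m is appended after this listing in case the version
% bundled with minFunc is not on the path.
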
% Steepest Descent
options.Method = 'sd';
x = minFunc(@rosenbrock,[0 0]',options);
fprintf('x1 = %.4f, x2 = %.4f (minFunc with steepest descent)\n',x(1),x(2));

% Cyclic Steepest Descent
options.Method = 'csd';
x = minFunc(@rosenbrock,[0 0]',options);
fprintf('x1 = %.4f, x2 = %.4f (minFunc with cyclic steepest descent)\n',x(1),x(2));

% Barzilai & Borwein
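% (note: options.bbType is understood to select among minFunc's
% Barzilai-Borwein step-size variants)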
options.Method = 'bb';
options.bbType = 1;
x = minFunc(@rosenbrock,[0 0]',options);
fprintf('x1 = %.4f, x2 = %.4f (minFunc with spectral gradient descent)\n',x(1),x(2));

% Hessian-Free Newton
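% (as I understand minFunc, 'newton0' computes Newton-like directions with
% conjugate gradient using Hessian-vector products, so no explicit Hessian
% is formed or supplied)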
options.Method = 'newton0';
x = minFunc(@rosenbrock,[0 0]',options);
fprintf('x1 = %.4f, x2 = %.4f (minFunc with Hessian-free Newton)\n',x(1),x(2));

% Hessian-Free Newton w/ L-BFGS preconditioner
options.Method = 'pnewton0';
x = minFunc(@rosenbrock,[0 0]',options);
fprintf('x1 = %.4f, x2 = %.4f (minFunc with preconditioned Hessian-free Newton)\n',x(1),x(2));

% Conjugate Gradient
options.Method = 'cg';
x = minFunc(@rosenbrock,[0 0]',options);
fprintf('x1 = %.4f, x2 = %.4f (minFunc with conjugate gradient)\n',x(1),x(2));

% Scaled Conjugate Gradient
options.Method = 'scg';
x = minFunc(@rosenbrock,[0 0]',options);
fprintf('x1 = %.4f, x2 = %.4f (minFunc with scaled conjugate gradient)\n',x(1),x(2));

% Preconditioned Conjugate Gradient
options.Method = 'pcg';
x = minFunc(@rosenbrock,[0 0]',options);
fprintf('x1 = %.4f, x2 = %.4f (minFunc with preconditioned conjugate gradient)\n',x(1),x(2));

% L-BFGS (the default method)
options.Method = 'lbfgs';
x = minFunc(@rosenbrock,[0 0]',options);
fprintf('x1 = %.4f, x2 = %.4f (minFunc with limited-memory BFGS - default)\n',x(1),x(2));

fprintf('---------------------------------------\n');
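Both minimize.m and minFunc above evaluate an objective called rosenbrock, which normally ships with minFunc as rosenbrock.m. In case that file is not on the path, the following is a minimal sketch of a compatible implementation, assuming only the value/gradient interface used here (the bundled version may also return a Hessian):

function [f,g] = rosenbrock(x)
% 2D Rosenbrock function, f(x) = 100*(x2 - x1^2)^2 + (1 - x1)^2,
% and its gradient; the minimum is at x = [1;1], matching the
% "optimal solution" printed by the example script.
f = 100*(x(2) - x(1)^2)^2 + (1 - x(1))^2;
g = [-400*x(1)*(x(2) - x(1)^2) - 2*(1 - x(1));   % df/dx1
      200*(x(2) - x(1)^2)];                      % df/dx2
end

Since minimize.m is called with the objective name as a string, such a sketch would need to live in its own rosenbrock.m file on the MATLAB path rather than as a local function.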