combinedDeepLearningActiveContour/minFunc/lbfgsC.c
#include <math.h>
#include "mex.h"

/* See lbfgs.m for details! */
/* This function may not exit gracefully on bad input! */

void mexFunction(int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[])
{
    /* Variable Declarations */

    double *s, *y, *g, *H, *d, *ro, *alpha, *beta, *q, *r;
    int nVars, nSteps, lhs_dims[2];
    double temp;
    int i, j;

    /* Get Input Pointers */

    g = mxGetPr(prhs[0]);
    s = mxGetPr(prhs[1]);
    y = mxGetPr(prhs[2]);
    H = mxGetPr(prhs[3]);

    /* Compute number of variables (nVars) and number of stored corrections (nSteps) */

    nVars = mxGetDimensions(prhs[1])[0];
    nSteps = mxGetDimensions(prhs[1])[1];

    /* Allocate Memory for Function Variables */
    ro = mxCalloc(nSteps, sizeof(double));
    alpha = mxCalloc(nSteps, sizeof(double));
    beta = mxCalloc(nSteps, sizeof(double));
    q = mxCalloc(nVars*(nSteps+1), sizeof(double));
    r = mxCalloc(nVars*(nSteps+1), sizeof(double));

    /* Set up Output Vector */

    lhs_dims[0] = nVars;
    lhs_dims[1] = 1;

    plhs[0] = mxCreateNumericArray(2, lhs_dims, mxDOUBLE_CLASS, mxREAL);
    d = mxGetPr(plhs[0]);

    /* ro(i) = 1/(y(:,i)'*s(:,i)) */
    for (i = 0; i < nSteps; i++)
    {
        temp = 0;
        for (j = 0; j < nVars; j++)
        {
            temp += y[j+nVars*i]*s[j+nVars*i];
        }
        ro[i] = 1/temp;
    }

    /* q(:,k+1) = g */
    for (i = 0; i < nVars; i++)
    {
        q[i+nVars*nSteps] = g[i];
    }

    /* First loop: walk backward through the stored corrections */
    for (i = nSteps-1; i >= 0; i--)
    {
        /* alpha(i) = ro(i)*s(:,i)'*q(:,i+1) */
        alpha[i] = 0;
        for (j = 0; j < nVars; j++)
        {
            alpha[i] += s[j+nVars*i]*q[j+nVars*(i+1)];
        }
        alpha[i] *= ro[i];

        /* q(:,i) = q(:,i+1) - alpha(i)*y(:,i) */
        for (j = 0; j < nVars; j++)
        {
            q[j+nVars*i] = q[j+nVars*(i+1)] - alpha[i]*y[j+nVars*i];
        }
    }

    /* r(:,1) = Hdiag*q(:,1), where Hdiag (= H[0]) is the scalar initial Hessian scaling */
    for (i = 0; i < nVars; i++)
    {
        r[i] = H[0]*q[i];
    }

    /* Second loop: walk forward through the stored corrections */
    for (i = 0; i < nSteps; i++)
    {
        /* beta(i) = ro(i)*y(:,i)'*r(:,i) */
        beta[i] = 0;
        for (j = 0; j < nVars; j++)
        {
            beta[i] += y[j+nVars*i]*r[j+nVars*i];
        }
        beta[i] *= ro[i];

        /* r(:,i+1) = r(:,i) + s(:,i)*(alpha(i)-beta(i)) */
        for (j = 0; j < nVars; j++)
        {
            r[j+nVars*(i+1)] = r[j+nVars*i] + s[j+nVars*i]*(alpha[i]-beta[i]);
        }
    }

    /* d = r(:,k+1) */
    for (i = 0; i < nVars; i++)
    {
        d[i] = r[i+nVars*nSteps];
    }

    /* Free Memory */

    mxFree(ro);
    mxFree(alpha);
    mxFree(beta);
    mxFree(q);
    mxFree(r);
}
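For context, here is a minimal MATLAB-side sketch of how this MEX routine could be compiled and called. The argument order (g, s, y, Hdiag) is taken directly from the input pointers read above; the toy quadratic, variable names, and scaling choice are illustrative assumptions, not code from the repository (lbfgs.m in minFunc is the actual reference caller). On a 64-bit MATLAB the int-based dimensions in this file may require building with the compatible-array-dims API.

% Build the MEX file (the -compatibleArrayDims flag may be needed on 64-bit MATLAB
% because lhs_dims is declared as int rather than mwSize).
mex -compatibleArrayDims lbfgsC.c

% Toy problem (assumed for illustration): f(x) = 0.5*x'*A*x with SPD A,
% so the curvature condition y(:,i)'*s(:,i) > 0 holds for every pair.
nVars  = 5; nSteps = 3;
A      = diag(1:nVars);          % SPD Hessian of the toy quadratic
X      = randn(nVars, nSteps+1); % arbitrary iterates
s      = diff(X, 1, 2);          % s(:,i) = x_{i+1} - x_i
y      = A*s;                    % y(:,i) = grad f(x_{i+1}) - grad f(x_i)
g      = A*X(:,end);             % gradient at the latest iterate

% A common choice for the scalar initial Hessian scaling.
Hdiag  = (s(:,end)'*y(:,end)) / (y(:,end)'*y(:,end));

% Two-loop recursion: d approximates (inverse Hessian)*g, i.e. roughly A\g here.
% For a descent direction one would pass -g instead (or negate d).
d = lbfgsC(g, s, y, Hdiag);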