|
a |
|
b/combinedDeepLearningActiveContour/functions/params2stack.m |
|
|
1 |
function stack = params2stack(params, netconfig)
% Converts a flattened parameter vector into a "stack" structure for us
% to work with. This is useful when you're building multilayer networks.
%
% stack = params2stack(params, netconfig)
%
% Inputs:
%   params    - flattened parameter vector; for each layer d it holds the
%               weight matrix entries (column-major) followed by the bias
%               vector entries, layers in order
%   netconfig - auxiliary variable containing the configuration of the
%               network:
%                 .inputsize  - number of inputs feeding the first layer
%                 .layersizes - cell array of per-layer unit counts
%
% Output:
%   stack     - depth-by-1 cell array; stack{d}.w is the weight matrix
%               (layersizes{d} x size of previous layer) and stack{d}.b
%               is the bias column vector (layersizes{d} x 1) of layer d.
%
% Raises an error if numel(params) does not match the total number of
% parameters implied by netconfig.

% Map the params (a vector) into a stack of weights.
depth = numel(netconfig.layersizes);
stack = cell(depth, 1);
prevLayerSize = netconfig.inputsize;    % the size of the previous layer
curPos = double(1);                     % mark current position in parameter vector

for d = 1:depth
    % Create layer d
    stack{d} = struct;

    % Extract weights: next wlen entries reshape (column-major) into a
    % (layersizes{d} x prevLayerSize) matrix.
    wlen = double(netconfig.layersizes{d} * prevLayerSize);
    stack{d}.w = reshape(params(curPos:curPos+wlen-1), ...
                         netconfig.layersizes{d}, prevLayerSize);
    curPos = curPos + wlen;

    % Extract bias: next blen entries form the bias column vector.
    blen = double(netconfig.layersizes{d});
    stack{d}.b = reshape(params(curPos:curPos+blen-1), ...
                         netconfig.layersizes{d}, 1);
    curPos = curPos + blen;

    % Set previous layer size for the next iteration.
    prevLayerSize = netconfig.layersizes{d};
end

% A too-short params errors above during indexing, but a too-long params
% was previously accepted silently, ignoring the trailing elements.
% Catch that mismatch explicitly so configuration bugs surface early.
if curPos - 1 ~= numel(params)
    error('params2stack:sizeMismatch', ...
          'params has %d elements but netconfig expects %d.', ...
          numel(params), curPos - 1);
end

end