Switch to unified view

a b/combinedDeepLearningActiveContour/functions/stack2params.m
1
function [params, netconfig] = stack2params(stack)

% STACK2PARAMS Convert a "stack" structure into a flattened parameter vector.
%
% [params, netconfig] = stack2params(stack)
%
% Converts a "stack" structure into a flattened parameter vector and also
% stores the network configuration. This is useful when working with
% optimization toolboxes such as minFunc.
%
% stack - the stack structure, where stack{1}.w = weights of first layer
%                                    stack{1}.b = biases of first layer
%                                    stack{2}.w = weights of second layer
%                                    stack{2}.b = biases of second layer
%                                    ... etc.
%
% params    - column vector [w1(:); b1(:); w2(:); b2(:); ...] with each
%             weight matrix unrolled in column-major order.
% netconfig - (optional) struct with fields:
%               .inputsize  - number of columns of stack{1}.w (0 if empty)
%               .layersizes - column cell array of each layer's output size

nLayers = numel(stack);

% Validate layer shapes up front, before any flattening work.
for d = 1:nLayers
    % Bias must have one entry per output unit of this layer.
    assert(size(stack{d}.w, 1) == size(stack{d}.b, 1), ...
        ['The bias should be a *column* vector of ' ...
         int2str(size(stack{d}.w, 1)) 'x1']);
    if d < nLayers
        % Output dimension of layer d must feed layer d+1's input dimension.
        assert(size(stack{d}.w, 1) == size(stack{d+1}.w, 2), ...
            ['The adjacent layers L' int2str(d) ' and L' int2str(d+1) ...
             ' should have matching sizes.']);
    end
end

% Flatten all weights and biases with a single concatenation. Growing
% params inside the loop would copy the whole vector each iteration
% (accidental O(n^2)); collecting the pieces first avoids that.
pieces = cell(2 * nLayers, 1);
for d = 1:nLayers
    pieces{2*d - 1} = stack{d}.w(:);
    pieces{2*d}     = stack{d}.b(:);
end
% vertcat with no arguments yields [], matching the original empty-stack result.
params = vertcat(pieces{:});

if nargout > 1
    % Setup netconfig so the stack can be reconstructed later.
    if nLayers == 0
        netconfig.inputsize = 0;
        netconfig.layersizes = {};
    else
        netconfig.inputsize = size(stack{1}.w, 2);
        % Preallocate and assign explicitly instead of concatenating a
        % numeric onto a cell array; result is the same column cell of sizes.
        netconfig.layersizes = cell(nLayers, 1);
        for d = 1:nLayers
            netconfig.layersizes{d} = size(stack{d}.w, 1);
        end
    end
end

end