a b/combinedDeepLearningActiveContour/functions/stackedAEPredict.m
1
function [pred] = stackedAEPredict(theta, inputSize, hiddenSize, numClasses, netconfig, data)
% stackedAEPredict: Takes a trained theta and a test data set,
% and returns the predicted labels for each example.

% theta:       trained weights (softmax layer followed by the flattened stack)
% inputSize:   the number of input units
% hiddenSize:  the number of hidden units *at the 2nd layer*
% numClasses:  the number of categories
% netconfig:   network configuration used to unflatten the stack
% data:        matrix containing the test data as columns, so data(:,i) is
%              the i-th example.
%
% Produces the prediction vector pred, where pred(i) is
% argmax_c P(y(c) | x(i)), with labels starting from 1.

%% Unroll theta parameter

% First extract the softmax weights (the first hiddenSize*numClasses entries).
softmaxTheta = reshape(theta(1:hiddenSize*numClasses), numClasses, hiddenSize);

% Extract out the "stack" of autoencoder weights/biases.
stack = params2stack(theta(hiddenSize*numClasses+1:end), netconfig);

%% Feed-forward pass through the stacked autoencoder layers.
depth = numel(stack);
a = cell(depth+1, 1);
a{1} = data;

for layer = 1:depth
    % Affine transform, broadcasting the bias across all examples.
    z = stack{layer}.w * a{layer} + repmat(stack{layer}.b, [1, size(a{layer}, 2)]);
    a{layer+1} = sigmoid(z);
end

%% Softmax layer: predicted label is the class with the highest score.
% Softmax normalization is monotone in its input, so taking the argmax of
% the raw scores softmaxTheta * a{depth+1} is equivalent to taking the
% argmax of the posterior probabilities. The previous thresholding
% (sigmoid(z) > 0.5) returned a numClasses-by-m logical matrix, not labels.
[~, pred] = max(softmaxTheta * a{depth+1}, [], 1);

% -----------------------------------------------------------

end
43
44
45
% You might find this useful
46
function sigm = sigmoid(x)
% sigmoid: Elementwise logistic function, sigm(i) = 1 / (1 + e^(-x(i))).
% Accepts scalars, vectors, or matrices; returns an array of the same size.
    negExp = exp(-x);
    sigm = 1 ./ (1 + negExp);
end