b/combinedDeepLearningActiveContour/functions/mrCost.m

% mrCost.m (adapted from softmaxCost.m)
function [cost, grad] = mrCost(theta, numOuts, inputSize, lambda, data, labels)
% numOuts - the number of outputs
% inputSize - the size N of the input vector
% lambda - weight decay parameter
% data - the N x M input matrix, where each column data(:, i) corresponds to
%        a single example
% labels - the matrix of labels corresponding to the input data; the
%          element-wise operations below assume labels has the same size as
%          the sigmoid output, i.e. numOuts x M
%
% Unroll the parameters from theta
theta = reshape(theta, numOuts, inputSize);

numCases = size(data, 2);
%groundTruth = full(sparse(labels, 1:numCases, 1));
cost = 0;

thetagrad = zeros(numOuts, inputSize);
%% ---------- YOUR CODE HERE --------------------------------------
% Instructions: Compute the cost and gradient for softmax regression.
% You need to compute thetagrad and cost.
% The groundTruth matrix might come in handy.
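% Note: despite the softmax wording in the template comments above, the code
% below implements an element-wise logistic (sigmoid) cross-entropy over
% numOuts independent output units, with L2 weight decay on theta.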
|
|
[nfeatures, nsamples] = size(data);

% Forward pass: linear scores, then an element-wise sigmoid
zi = theta * data;
%zi=zi+.0005*randn(size(zi));
hzi = 1./(1+exp(-zi));
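% Here hzi(k,i) = 1 / (1 + exp(-theta(k,:) * data(:,i))) is the predicted
% probability that output unit k is active for sample i.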
|
|
% Per-unit, per-sample cross-entropy terms
temp1 = labels .* log(hzi);
temp2 = (1-labels) .* log(1-hzi);

% Average negative log-likelihood plus L2 weight decay on theta
cost = - sum(sum(temp1+temp2)) ./ nsamples;
cost = cost + sum(sum(theta .^ 2)) .* lambda ./ 2;
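% For reference, the cost computed above is
%   J(theta) = -(1/M) * sum_k sum_i ( y(k,i)*log(h(k,i)) + (1-y(k,i))*log(1-h(k,i)) )
%              + (lambda/2) * sum(theta(:).^2)
% where h = hzi, y = labels and M = nsamples.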
|
|
% Gradient: cross-entropy term plus the weight decay term
temp3 = labels - hzi;
temp4 = temp3 * data';
thetagrad = - temp4 ./ nsamples;
thetagrad = thetagrad + lambda .* theta;
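% For reference, the gradient assembled above is
%   dJ/dtheta = -(1/M) * (labels - hzi) * data' + lambda * theta
% i.e. the derivative of the averaged cross-entropy plus the weight decay term.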
|
|
% ------------------------------------------------------------------
% Unroll the gradient matrices into a vector for minFunc
grad = thetagrad(:);

end
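
% Example usage (illustrative sketch only; the sizes, lambda value and the
% random data below are placeholders, and minFunc, referenced in the comment
% above, is assumed to be on the MATLAB path):
%
%   inputSize = 64; numOuts = 10; M = 100; lambda = 1e-4;
%   data   = randn(inputSize, M);
%   labels = double(rand(numOuts, M) > 0.5);
%   theta0 = 0.005 * randn(numOuts * inputSize, 1);
%
%   % Single evaluation of cost and gradient
%   [cost, grad] = mrCost(theta0, numOuts, inputSize, lambda, data, labels);
%
%   % Minimization with minFunc
%   options.maxIter = 100;
%   opttheta = minFunc(@(p) mrCost(p, numOuts, inputSize, lambda, data, labels), ...
%                      theta0, options);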