combinedDeepLearningActiveContour/train_DL_ROI.m
%% automatic detection of heart
clc;
clear all;
close all;
addpath('functions');
%% STEP 0: parameters
patchsize = 32;
visibleSize = patchsize*patchsize;   % number of input units
hiddenSizeL1 = 100;                  % number of hidden units in layer 1
hiddenSizeL2 = 100;                  % number of hidden units in layer 2
sparsityParam1 = 0.01;               % desired average activation of the hidden units
lambda = 3e-3;                       % weight decay parameter
beta = 3;                            % weight of the sparsity penalty term
outputSize = visibleSize;            % number of output units
%%======================================================================
%% STEP 1: load training inputs and labels from mat files
load matFiles/training_data;
train_input  = sampleIMAGES(t_I, patchsize);
train_labels = sampleIMAGES(t_yROI, patchsize);
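% Sanity check (illustrative; assumes sampleIMAGES returns one vectorized
% patchsize-by-patchsize patch per column, consistent with the reshape in
% the test section below). Inputs and labels must be column-aligned.
assert(size(train_input, 1) == visibleSize);
assert(isequal(size(train_input), size(train_labels)));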
%% train sparse Auto Encoder 1
%  Randomly initialize the parameters
saeTheta1 = initializeParameters(hiddenSizeL1, visibleSize);

%  Use minFunc to minimize the function
addpath minFunc/
options.Method = 'lbfgs'; % Use L-BFGS to optimize the cost function.
                          % Generally, for minFunc to work, you need a
                          % function pointer with two outputs: the function
                          % value and the gradient. In our problem,
                          % sparseAutoencoderCost.m satisfies this.
options.maxIter = 400;    % maximum number of L-BFGS iterations to run
options.display = 'on';

[sae1OptTheta, cost] = minFunc( @(p) sparseAutoencoderCost(p, ...
                                   visibleSize, hiddenSizeL1, ...
                                   lambda, sparsityParam1, ...
                                   beta, train_input), ...
                                   saeTheta1, options);
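% sae1OptTheta is a flat parameter vector; judging from the reshape and
% bias indexing used further below, its layout is [W1(:); W2(:); b1(:); b2(:)],
% with W1 of size hiddenSizeL1-by-visibleSize.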
%% STEP 2: visualization of AE1
W1 = reshape(sae1OptTheta(1:hiddenSizeL1*visibleSize), hiddenSizeL1, visibleSize);
display_network(W1', 12);
%% compute activations from layer 1
[sae1Features] = feedForwardAutoencoder(sae1OptTheta, hiddenSizeL1, ...
                                        visibleSize, train_input);
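% For reference, feedForwardAutoencoder returns the hidden-layer
% activations; a minimal sketch, assuming the standard UFLDL formulation:
%   W1 = reshape(theta(1:hiddenSize*visibleSize), hiddenSize, visibleSize);
%   b1 = theta(2*hiddenSize*visibleSize+1 : 2*hiddenSize*visibleSize+hiddenSize);
%   activation = 1 ./ (1 + exp(-bsxfun(@plus, W1*data, b1)));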

%% train sparse Auto Encoder 2
%  Randomly initialize the parameters
sae2Theta = initializeParameters(hiddenSizeL2, hiddenSizeL1);
sparsityParam2 = 1e-1;    % desired average activation of the layer-2 hidden units
lambda2 = 3e-3;           % weight decay parameter
beta2 = 3;                % weight of the sparsity penalty term
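% Note: the call below reuses the minFunc options configured for AE1
% (L-BFGS, 400 iterations), training AE2 on the layer-1 features.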
[sae2OptTheta, costL2] = minFunc( @(p) sparseAutoencoderCost(p, ...
                                  hiddenSizeL1, hiddenSizeL2, ...
                                  lambda2, sparsityParam2, ...
                                  beta2, sae1Features), ...
                                  sae2Theta, options);

W2 = reshape(sae2OptTheta(1:hiddenSizeL2*hiddenSizeL1), hiddenSizeL2, hiddenSizeL1);
% display_network(W2', 12);
%% compute activations from layer 2
[sae2Features] = feedForwardAutoencoder(sae2OptTheta, hiddenSizeL2, ...
                                        hiddenSizeL1, sae1Features);

%% train multi-output logistic regression
lambda_mr = 1e-4;
options_mr.maxIter = 100;
trainLabels = train_labels;
mrModel = mrTrain(hiddenSizeL2, outputSize, lambda_mr, ...
                            sae2Features, trainLabels, options_mr);

saeMultRegOptTheta = mrModel.optTheta(:);
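% The regression layer maps the hiddenSizeL2 deep features of each patch to
% outputSize (= patchsize^2) logistic outputs, one per pixel of the ROI
% label patch, i.e. a soft binary mask predicted per patch.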

%% fine tuning

% Initialize the stack using the parameters learned above
stack = cell(2,1);
inputSize = visibleSize;

stack{1}.w = reshape(sae1OptTheta(1:hiddenSizeL1*inputSize), ...
                     hiddenSizeL1, inputSize);
stack{1}.b = sae1OptTheta(2*hiddenSizeL1*inputSize+1:2*hiddenSizeL1*inputSize+hiddenSizeL1);

stack{2}.w = reshape(sae2OptTheta(1:hiddenSizeL2*hiddenSizeL1), ...
                     hiddenSizeL2, hiddenSizeL1);
stack{2}.b = sae2OptTheta(2*hiddenSizeL2*hiddenSizeL1+1:2*hiddenSizeL2*hiddenSizeL1+hiddenSizeL2);
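% The bias offsets above skip both weight matrices: in each autoencoder
% numel(W1) = numel(W2) = hiddenSize*inputSize, so b1 starts at index
% 2*hiddenSize*inputSize + 1 of the flattened [W1(:); W2(:); b1(:); b2(:)].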

% Initialize the parameters for the deep model
[stackparams, netconfig] = stack2params(stack);
stackedAETheta = [ saeMultRegOptTheta ; stackparams ];
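% Note the ordering: the regression parameters come first, followed by the
% flattened stack, so stackedAECost must unpack the vector in that order.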

[stackedAEOptTheta, loss] = minFunc( @(x) stackedAECost(x, ...
      inputSize, hiddenSizeL2, outputSize, netconfig, ...
      lambda, train_input, train_labels), ...
      stackedAETheta, options);
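% Fine-tuning optimizes all layers jointly (both encoders plus the
% regression output layer) against the pixel-wise ROI labels, again
% reusing the L-BFGS options from above.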
%% save results
%  save ROI_V32_H100_rho_0p01
%% test
% load test data
% load matFiles/validation_data;
% [xIsize,yIsize,zIsize]=size(t_I);
% test_input=sampleIMAGES(t_I,patchsize);
%
% [pyroi] = stackedAEPredict(stackedAEOptTheta, inputSize, hiddenSizeL2, ...
%                           outputSize, netconfig, test_input);
%
% yroi_h=reshape(pyroi,patchsize,patchsize,[]);
% %%
% for k=1:size(yroi_h,3)
%     y1=yroi_h(:,:,k)';
%     y1=imresize(y1,xIsize/patchsize);
%     subplot(4,6,k)
%     imshow(y1)
%     I1=t_I(:,:,k);
%     ymask(:,:,k)=y1.*I1;
% end
% disImgs(ymask,19);
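% The commented test block above predicts an ROI mask per slice, reshapes
% each prediction back to a patchsize-by-patchsize image, upsamples it to
% the original slice size, and multiplies it with the input slice to mask
% out everything outside the detected heart region.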