
Ensemble Learning/AdaBoost/buildAdaBoost.m
function abClassifier = buildAdaBoost(trnX, trnY, iter, tstX, tstY)
% buildAdaBoost  Train an AdaBoost ensemble of decision stumps.
%   trnX: N x d training features, trnY: N x 1 labels in {-1, +1},
%   iter: number of boosting rounds, tstX/tstY: optional test set.
if nargin < 5
    % Treat the test set as absent unless both tstX and tstY are supplied
    tstX = [];
    tstY = [];
end
abClassifier = initAdaBoost(iter);

N = size(trnX, 1); % Number of training samples
sampleWeight = repmat(1/N, N, 1); % Start with uniform sample weights

for i = 1:iter
    % Fit a decision stump to the weighted training data
    weakClassifier = buildStump(trnX, trnY, sampleWeight);
    abClassifier.WeakClas{i} = weakClassifier;
    abClassifier.nWC = i;
    % Compute the weight (alpha) of this classifier; guard against a zero error
    abClassifier.Weight(i) = 0.5*log((1 - weakClassifier.error)/max(weakClassifier.error, eps));
    % Update sample weights: misclassified samples gain weight, correct ones lose it
    label = predStump(trnX, weakClassifier);
    tmpSampleWeight = -abClassifier.Weight(i)*(trnY.*label); % N x 1
    tmpSampleWeight = sampleWeight.*exp(tmpSampleWeight);    % N x 1
    sampleWeight = tmpSampleWeight./sum(tmpSampleWeight);    % Normalize to sum to 1

    % Track training error of the ensemble built so far
    [~, abClassifier.trnErr(i)] = predAdaBoost(abClassifier, trnX, trnY);
    % Track test error if a test set was supplied
    if ~isempty(tstY)
        abClassifier.hasTestData = true;
        [~, abClassifier.tstErr(i)] = predAdaBoost(abClassifier, tstX, tstY);
    end
    % fprintf('\tIteration %d, Training error %f\n', i, abClassifier.trnErr(i));
end
end
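
A minimal usage sketch, assuming the companion helpers in this directory (initAdaBoost, buildStump, predStump, predAdaBoost) are on the MATLAB path; the {-1, +1} label encoding and the N x d feature-matrix shape are inferred from the weight-update step above, and the 50-round count is arbitrary:

% abc = buildAdaBoost(trnX, trnY, 50, tstX, tstY);   % train with test-set tracking
% plot(1:abc.nWC, abc.trnErr, 1:abc.nWC, abc.tstErr) % per-iteration error curves
% [pred, testErr] = predAdaBoost(abc, tstX, tstY);   % final ensemble predictions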