Ensemble Learning/AdaBoost/app1.m

close all; clear; clc;

load dataDWT.mat;
%load dataCSP.mat;
%load dataAR.mat;
%load dataPSD.mat;
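
% The two classes are coded 1 and 2 in the data; recoding class 2 as -1
% puts the labels in {+1, -1}, the convention assumed by the exponential
% weight update inside the boosting loop.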
Y(Y==2) = -1;

N = size(X,1);
trnX = X(1:N, :);
trnY = Y(1:N);

iter = 30;
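% The ensemble structure returned by initAdaBoost is filled in below:
% WeakClas (the weak classifiers), Weight (their vote weights alpha_t),
% nWC (how many have been trained so far), and trnErr (training error
% recorded after each iteration).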
abClassifier = initAdaBoost(iter);

N = size(trnX, 1); % Number of training samples
sampleWeight = repmat(1/N, N, 1);
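
% Boosting loop. Starting from uniform sample weights, each iteration fits a
% decision stump to the weighted training set, assigns it the vote weight
%   alpha_t = 0.5 * log((1 - eps_t) / eps_t),
% where eps_t is the stump's weighted error, and then updates the sample
% weights as w_i <- w_i * exp(-alpha_t * y_i * h_t(x_i)) before renormalizing,
% so misclassified samples receive more attention in the next round.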
for t = 1:iter
    weakClassifier = buildStump(trnX, trnY, sampleWeight);

    abClassifier.WeakClas{t} = weakClassifier;
    abClassifier.nWC = t;
    % Compute the weight of this classifier
    abClassifier.Weight(t) = 0.5*log((1-weakClassifier.error)/weakClassifier.error);
    weakClassifier.error
    % Update sample weight
    label = predStump(trnX, weakClassifier);
    tmpSampleWeight = -1*abClassifier.Weight(t)*(trnY.*label); % N x 1
    tmpSampleWeight = sampleWeight.*exp(tmpSampleWeight); % N x 1

    sampleWeight = tmpSampleWeight./sum(tmpSampleWeight); % Normalized

    % Predict on training data
    [ttt, abClassifier.trnErr(t)] = predAdaBoost(abClassifier, trnX, trnY);

    fprintf('\tIteration %d, Training error %f\n', t, abClassifier.trnErr(t));
end

trnError = abClassifier.trnErr;
plot(1:iter, trnError);
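
The script above calls four helper functions, initAdaBoost, buildStump, predStump and predAdaBoost, which are defined in separate files of the companion code and are not reproduced in this listing. For orientation only, the following is a minimal sketch of implementations that would be compatible with the way app1.m uses them; everything beyond the interfaces visible in app1.m (the fields WeakClas, Weight, nWC, trnErr and error, and the {+1, -1} labels) is an assumption, in particular the stump fields dim, thresh and ineq. Each function would go in its own .m file.

% Sketch only -- not the book's actual implementation.

function abClassifier = initAdaBoost(iter)
% Container for the ensemble; field names match those used in app1.m.
abClassifier.WeakClas = cell(1, iter);   % trained weak classifiers
abClassifier.Weight   = zeros(1, iter);  % vote weights alpha_t
abClassifier.trnErr   = zeros(1, iter);  % training error after t rounds
abClassifier.nWC      = 0;               % number of weak classifiers so far
end

function stump = buildStump(X, Y, w)
% Weighted decision stump: pick the feature, threshold and inequality
% direction with the smallest weighted 0-1 error. The fields dim, thresh
% and ineq are hypothetical; only .error is referenced by app1.m.
[~, D] = size(X);
stump.error = inf;
for d = 1:D
    for th = unique(X(:, d))'              % candidate thresholds
        for ineq = {'lt', 'gt'}
            cand.dim = d; cand.thresh = th; cand.ineq = ineq{1};
            pred = predStump(X, cand);      % N x 1 labels in {+1, -1}
            err  = sum(w .* (pred ~= Y));   % weighted error
            if err < stump.error
                cand.error = err;
                stump = cand;
            end
        end
    end
end
end

function label = predStump(X, stump)
% Apply a stump: +1 on one side of the threshold, -1 on the other.
label = ones(size(X, 1), 1);
if strcmp(stump.ineq, 'lt')
    label(X(:, stump.dim) <= stump.thresh) = -1;
else
    label(X(:, stump.dim) >  stump.thresh) = -1;
end
end

function [label, errRate] = predAdaBoost(abClassifier, X, Y)
% Final hypothesis: sign of the alpha-weighted vote of the stumps so far.
score = zeros(size(X, 1), 1);
for t = 1:abClassifier.nWC
    score = score + abClassifier.Weight(t)*predStump(X, abClassifier.WeakClas{t});
end
label = sign(score);
label(label == 0) = 1;                      % break ties toward +1
errRate = mean(label ~= Y);
end

Because buildStump receives the current sampleWeight vector, each stump is fitted to the reweighted training set, and predAdaBoost evaluates the partial ensemble after every round; this is what produces the training-error curve that app1.m plots at the end.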