|
a |
|
b/classification/RUSboost/runSRB.m |
|
|
function [rusTree, istrain, istest] = runSRB(X,Y)
%RUNSRB Train a RUSBoost decision-tree ensemble on a 75/25 holdout split.
%   [rusTree, istrain, istest] = RUNSRB(X, Y) tabulates the class
%   distribution of Y, splits the observations (rows of X) into a 75%%
%   training / 25%% test holdout, trains a RUSBoosted ensemble of
%   decision trees on the training portion, and plots the cumulative
%   test classification error as a function of ensemble size.
%
%   Inputs:
%     X - predictor matrix, one observation per row
%     Y - class labels, one label per row of X
%
%   Outputs:
%     rusTree - trained RUSBoost ensemble
%     istrain - logical index vector of training observations
%     istest  - logical index vector of test observations

%% Transform data into a usable form

% Semantic label being classified (display only; the data come in via Y).
label = 'Sphericity';

% Show distribution for semantic label. RUSBoost is aimed at skewed class
% distributions, so the tabulation is useful context.
fprintf('%s%s\n','Tabulation for: ', label);
tabulate(Y);

% Create the weak-learner template for RUSBoost to build an ensemble on.
% TODO: CHANGE THESE PARAMS TO SOMETHING ELSE
t = ClassificationTree.template();

%% Build testing and training sets

% Stratified holdout partition: 25%% of observations reserved for testing.
part = cvpartition(Y,'holdout',0.25);
istrain = training(part);
istest = test(part);

%% Run RUSBoosting

% Build training sets.
trainingX = X(istrain,:);
trainingY = Y(istrain);

% Run RUSBoost on the semantic label.
% BUG FIX: the original passed the literal 'Tree' here, so the template t
% created above was never used and any parameters set on it (see TODO)
% would silently have no effect. Passing t makes the template authoritative.
rusTree = fitensemble(trainingX,trainingY,'RUSBoost',1000,t,...
    'LearnRate',1,'nprint',100);

%% Plot error rate

% Build testing sets.
testingX = X(istest,:);
testingY = Y(istest);

figure;
plot(loss(rusTree,testingX,testingY,'mode','cumulative'));
grid on;
xlabel('Number of trees');
ylabel('Test classification error');
disp('end');

end