Semantic Features/GetXws.m

function [XwsRaw, numSegments] = GetXws(instanceID)
%GetXws Gets the X matrix of feature data from the weak segmentations
%   Just reads in data from CSVs created by Patrick's extraction software
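%   Example usage (a sketch; assumes instanceID is a vector of DICOM instance
%   IDs ordered to match the ratings):
%       [XwsRaw, numSegments] = GetXws(instanceID);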

%Get features from our weak segmentors (FYI: 4 Otsu and 6 Region Growing)

fprintf('Reading calculated feature data from text files.\n');
featureDirectory = 'C:\Users\esmith2\Documents\Ethans Code\gitCode\Image features calculation code\Features for Production\2K image dataset\';
%featureDirectory = 'C:\Users\esmith2\Documents\Ethans Code\gitCode\Image features calculation code\Features for Production\6K org image dataset\';
%featureDirectory = 'C:\Ethan\repos\Image features calculation code\Features for Production\6K org image dataset\';

%get Otsu features
%[markovValues markovHeaders]
fprintf('Reading Otsu segmentation features.\n');
fprintf('\tReading intensity values.\n');
[intensityValuesO, intensityHeadersO, intensityDCMOrderO] = parseCSV(strcat(featureDirectory, 'otsu_nodules_intensity_features.txt'));
fprintf('\tReading Gabor values.\n');
[gaborValuesO, gaborHeadersO, gaborDCMOrderO] = parseCSV(strcat(featureDirectory, 'otsu_nodules_gab_features.txt'));
fprintf('\tReading Haralick values.\n');
[haralickValuesO, haralickHeadersO, haralickDCMOrderO] = parseCSV(strcat(featureDirectory, 'otsu_nodules_har_features.txt'));
fprintf('\tReading shape values.\n');
[shapeValuesO, shapeHeadersO, shapeDCMOrderO] = parseCSV(strcat(featureDirectory, 'otsu_nodules_shape_features.txt'));

%get Region Growing features and horizontally concatenate them
%[markovValues markovHeaders]
fprintf('Reading Region Growing segmentation features.\n');
fprintf('\tReading intensity values.\n');
[intensityValuesRG, intensityHeadersRG, intensityDCMOrderRG] = parseCSV(strcat(featureDirectory, 'rg_nodules_intensity_features.txt'));
fprintf('\tReading Gabor values.\n');
[gaborValuesRG, gaborHeadersRG, gaborDCMOrderRG] = parseCSV(strcat(featureDirectory, 'rg_nodules_gab_features.txt'));
fprintf('\tReading Haralick values.\n');
[haralickValuesRG, haralickHeadersRG, haralickDCMOrderRG] = parseCSV(strcat(featureDirectory, 'rg_nodules_har_features.txt'));
fprintf('\tReading shape values.\n');
[shapeValuesRG, shapeHeadersRG, shapeDCMOrderRG] = parseCSV(strcat(featureDirectory, 'rg_nodules_shape_features.txt'));

%concatenate
intensityValues = [intensityValuesO, intensityValuesRG];
gaborValues = [gaborValuesO, gaborValuesRG];
haralickValues = [haralickValuesO, haralickValuesRG];
shapeValues = [shapeValuesO, shapeValuesRG];

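%Note: the reordering below indexes every feature matrix with row positions
%looked up in intensityDCMOrderO, which assumes all of the feature CSVs list
%the nodules in the same order.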
%This should happen before the reshaping of the matrices, I think
%Rearrange the calculated feature rows so they match up with the appropriate ratings
fprintf('Reorganizing data to match ratings.\n');
newRowOrder = [];
for i = 1:length(instanceID)
    %find the CSV row whose DICOM instance ID matches this rating's instance ID
    row = find(instanceID(i) == intensityDCMOrderO);
    if ~isempty(row)
        newRowOrder = vertcat(newRowOrder, row);
    end
end
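
%Note: a vectorized equivalent of the loop above (a sketch, assuming each
%DICOM instance ID appears at most once in intensityDCMOrderO) would be:
%   [found, loc] = ismember(instanceID, intensityDCMOrderO);
%   newRowOrder = loc(found);
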
%apply reordering
intensityValues     = intensityValues(newRowOrder,:);
gaborValues         = gaborValues(newRowOrder,:);
haralickValues      = haralickValues(newRowOrder,:);
shapeValues         = shapeValues(newRowOrder,:);

XwsRaw = [intensityValues, gaborValues, haralickValues, shapeValues];
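%Resulting column order in XwsRaw: intensity, Gabor, Haralick, then shape
%features, with the Otsu columns ahead of the Region Growing columns within
%each feature group.
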
%get number of segments
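%The feature headers appear to encode the segment index in their final
%character, so counting consecutive headers that share the first header's last
%character gives the number of features per segment; dividing the total header
%count by that gives the number of segments.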
%Count Otsu segments
i = 1;
while i < length(intensityHeadersO) && intensityHeadersO{1}(end) == intensityHeadersO{i+1}(end)
    i = i + 1;
end
numSegments = length(intensityHeadersO)/i;
%count Region Growing segments
i = 1;
while i < length(intensityHeadersRG) && intensityHeadersRG{1}(end) == intensityHeadersRG{i+1}(end)
    i = i + 1;
end
numSegments = numSegments + length(intensityHeadersRG)/i;
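%Sanity check: with 4 Otsu and 6 Region Growing segments (per the note at the
%top of this function), numSegments should come out to 10.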

%unused method of getting the largest boundary from each slice
%largeInstances = intensityValues(:,end);

end