-
Notifications
You must be signed in to change notification settings - Fork 2
Expand file tree
/
Copy pathmain.m
More file actions
executable file
·157 lines (122 loc) · 5.33 KB
/
main.m
File metadata and controls
executable file
·157 lines (122 loc) · 5.33 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
% Author: Juan Haladjian
% Contact: haladjia@in.tum.de
%
% Tutorial on activity recognition with wearable sensor data.
% This application classifies the gait of a cow into normal or abnormal.
% The abnormal gait was collected while the cow walked with a plastic block
% attached to its hind left leg.
% Both data sets were collected with a motion sensor attached to the cow's
% left hind leg.
%
% Note: remember to add the current directory to Matlab's path using:
% addpath(genpath('./'));
close all;

%% Load Data
% Import both recordings as tables and convert to numeric matrices;
% column 1 is the x-axis accelerometer signal used for plotting.
importer = TableImporter();
normalDataTable = importer.importTable('data-normal.txt');
normalData = table2array(normalDataTable);
plotter = Plotter();
plotter.plotSignalBig(normalData(:,1),'Normal Cow Gait Data','Sample','Accelerometer-x');
abnormalDataTable = importer.importTable('data-abnormal.txt');
abnormalData = table2array(abnormalDataTable);
plotter.plotSignalBig(abnormalData(:,1),'Abnormal Cow Gait Data','Sample','Accelerometer-x');
data = normalData;
% Note: you can save the data in binary Matlab's .mat format
% for faster loading next time with the save command.
% If the data was saved in binary format you can load it with:
% data = load('data');
% data = data.data;

%% Explore Frequency Domain
plotter.plotSpectrogram(data(:,1),'Spectrogram','Sample','Frequency [Hz]');

%% ARC (Activity Recognition Chain)
% Extract one feature row per detected gait segment for each recording.
featuresNormalTable = createFeaturesTable(normalData);
featuresAbnormalTable = createFeaturesTable(abnormalData);
featuresNormalData = table2array(featuresNormalTable);
featuresAbnormalData = table2array(featuresAbnormalTable);
nRowsNormalData = size(featuresNormalData,1);
nRowsAbnormalData = size(featuresAbnormalData,1);
nRows = nRowsNormalData + nRowsAbnormalData;
nCols = size(featuresNormalData,2);

%% Labeling
% Append a label column and stack both classes into a single table.
featuresNormalData(:,nCols+1) = 1;%label normal data as 1
featuresAbnormalData(:,nCols+1) = 2;%label abnormal data as 2
featuresData = zeros(nRows,nCols+1);
featuresData(1:nRowsNormalData,:) = featuresNormalData;
featuresData(nRowsNormalData+1:end,:) = featuresAbnormalData;
featuresTable = array2table(featuresData);
featuresTable.Properties.VariableNames = [featuresNormalTable.Properties.VariableNames,'label'];

%% Normalize features
dataNormalizer = DataNormalizer();
dataNormalizer.fit(featuresTable);
featuresTable = dataNormalizer.normalize(featuresTable);

% Hold out the first 100 (normal) and last 100 (abnormal) rows for testing.
% (end-99:end spans exactly 100 rows; end-100:end would take 101.)
testIdxs = false(1,nRows);
testIdxs(1:100) = true;
testIdxs(end-99:end) = true;
trainTable = featuresTable(~testIdxs,:);
testTable = featuresTable(testIdxs,:);

%% Feature selection
% Note: if you get errors here, double check that your features table does
% not contain the same value for every feature in a column.
nFeatures = 20;
featureSelector = FeatureSelector();
bestFeatures = featureSelector.findBestFeatures(trainTable,nFeatures);
trainTable = featureSelector.selectFeatures(trainTable,bestFeatures);
testTable = featureSelector.selectFeatures(testTable,bestFeatures);
% Note: usually, more features lead to higher classification accuracies. However, too
% many features might overfit the classifier and will require more data to ensure the
% computed accuracy represents how the classifier would behave in real life.
% Furthermore, in a real-life system, more than 30 features are
% impractical, depending on how CPU intensive the features are and the
% computational resources (CPU, memory, energy) available on the embedded
% device.

%% Train Classifier
% This classifier uses a predefined algorithm (SVM) with a polynomial
% kernel. You can use Matlab's Classification Learner App to test further
% algorithms.
trainer = Trainer();
trainer.train(trainTable);

%% Test Classifier
labels = trainer.test(testTable);
shouldBeLabels = table2array(testTable(:,end));
% Note: to test other algorithms, open the Classification Learner Tool in the
% Matlab-Toolbox and select the variable 'table'.

%% Plot Results
confusionMatrix = confusionmat(shouldBeLabels,labels);
plotter.plotConfusionMatrix(confusionMatrix,["Normal","Abnormal"]);
function featuresTable = createFeaturesTable(data)
% CREATEFEATURESTABLE Extract a feature table from raw motion-sensor data.
%   featuresTable = createFeaturesTable(data) low-pass filters the first
%   three columns (accelerometer axes), detects gait events as peaks in the
%   signal energy, cuts a fixed-size window around each peak, and extracts
%   one feature row per window using FeatureExtractor.
%
%   Input:  data - numeric matrix, one sample per row; columns 1:6 are the
%           sensor channels used for feature extraction (columns 1:3 are
%           assumed to be accelerometer axes — confirm against the importer).
%   Output: featuresTable - table with one row per detected segment and one
%           named column per feature (names from FeatureExtractor).
plotter = Plotter();

%% Low-pass filter
% First-order Butterworth; cutoff is normalized (fraction of the Nyquist
% frequency), i.e. 20/100 = 0.2.
cutoff = 20/100;
[b,a] = butter(1,cutoff);
data(:,1:3) = filter(b,a,data(:,1:3));
plotter.plotSignalBig(data(:,1),'Filtered','Sample','Acceleration [g]');

%% Energy calculation (e.g. for peak detection)
% Sum of squares across the three accelerometer axes.
energy = sum(data(:,1:3).^2, 2);
axis = plotter.plotSignalBig(energy,'Energy','Sample','Acceleration [g2]');

%% Event Detection
[peaks, peakLocations] = findpeaks(energy,'minPeakHeight',5,'minPeakDistance',80);
hold on;
plotter.plotPeaks(axis,peakLocations,peaks,'green')

%% Segmentation
% Each segment spans [peak-segmentA, peak+segmentB] samples around a peak.
segmentA = 60;
segmentB = 80;
segmentStartIdxs = peakLocations - segmentA;
segmentEndIdxs = peakLocations + segmentB;
nSegments = length(segmentStartIdxs);

%% Feature Extraction
featureExtractor = FeatureExtractor();
nFeatures = featureExtractor.nFeatures;
featuresTableArray = zeros(nSegments,nFeatures);
% Use size(data,1) for the row count: length() returns the LARGEST
% dimension of a matrix and would be wrong for wide matrices.
nSamples = size(data,1);
for i = 1 : nSegments
    % Clamp the window to the valid sample range at the signal edges.
    startIdx = max(1,segmentStartIdxs(i));
    endIdx = min(nSamples,segmentEndIdxs(i));
    segment = data(startIdx:endIdx,1:6);
    featureVector = featureExtractor.extractFeaturesForSegment(segment);
    featuresTableArray(i,:) = featureVector;
end
featuresTable = array2table(featuresTableArray);
featuresTable.Properties.VariableNames = featureExtractor.featureNames;
% Note: here you could save the features using the save command for faster
% loading next time.
end