function accuracy = trainAndValidateELM(PHI,Y,featIxes,nFolds,nELM,nUnits)
% This function trains and validates an ELM classifier with k-fold
% cross-validation
%
% Inputs:  PHI      <- array of training inputs with size = num. patterns x num. features
%          Y        <- array of training targets with size = num. patterns x num. categories
%                      (in each row of Y, only the entry corresponding to the correct category is 1)
%          featIxes <- indexes of the selected features (columns of PHI)
%          nFolds   <- num. of folds for cross-validation
%          nELM     <- num. of ELMs in the ensemble
%          nUnits   <- num. of hidden units of each ELM
%
% Output:
%          accuracy <- accuracy of the predictions of the cross-validated ELM ensemble
%
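%
% Example usage (illustrative sketch only; the data, dimensions, and parameter
% values below are assumptions and not part of the original code; it also
% assumes ELMclassifier and computeAccuracy are on the MATLAB path):
%
%   nPatterns = 200;  nFeatures = 10;  nCategories = 3;
%   PHI = rand(nPatterns,nFeatures);              % candidate input features
%   lbl = randi(nCategories,nPatterns,1);         % class labels 1..nCategories
%   Y   = zeros(nPatterns,nCategories);           % one-hot target matrix
%   Y(sub2ind(size(Y),(1:nPatterns)',lbl)) = 1;
%   featIxes = [1 3 5];                           % evaluate this feature subset
%   accuracy = trainAndValidateELM(PHI,Y,featIxes,5,10,20);  % 5 folds, 10 ELMs, 20 hidden units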
%
%
% Copyright 2015 Riccardo Taormina ([email protected]),
% Gulsah Karakaya ([email protected]),
% Stefano Galelli ([email protected]),
% and Selin Damla Ahipasaoglu ([email protected]).
%
% Please refer to README.txt for further information.
%
%
% This file is part of Matlab-Multi-objective-Feature-Selection.
%
% Matlab-Multi-objective-Feature-Selection is free software: you can redistribute
% it and/or modify it under the terms of the GNU General Public License
% as published by the Free Software Foundation, either version 3 of the
% License, or (at your option) any later version.
%
% This code is distributed in the hope that it will be useful,
% but WITHOUT ANY WARRANTY; without even the implied warranty of
% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
% GNU General Public License for more details.
%
% You should have received a copy of the GNU General Public License
% along with Matlab-Multi-objective-Feature-Selection.
% If not, see <http://www.gnu.org/licenses/>.
%
% initialize
Yhat       = zeros(size(Y,1),1);
accuracies = zeros(1,nELM) + Inf;

for j = 1 : nELM
    % k-fold cross-validation
    lData = size(Y,1);
    lFold = floor(lData/nFolds);
    for i = 1 : nFolds
        % select training and validation data
        ix1 = (i-1)*lFold+1;
        if i == nFolds
            ix2 = lData;   % last fold absorbs any leftover patterns
        else
            ix2 = i*lFold;
        end
        valIxes = ix1:ix2;                   % select the validation chunk
        trIxes  = setdiff(1:lData,valIxes);  % obtain training indexes by set difference
        % create datasets
        trX  = PHI(trIxes,featIxes);  trY = Y(trIxes,:);
        valX = PHI(valIxes,featIxes);
        % train the ELM and predict on the validation chunk
        % (inputs are transposed to match the orientation expected by ELMclassifier)
        [~,Yhat(valIxes)] = ...
            ELMclassifier(trX', trY', valX', nUnits);
    end
    % compute accuracy after cross-validation
    [~,temp] = max(Y');
    Y_ = temp';
    accuracies(j) = computeAccuracy(Y_,Yhat);
end
accuracy = mean(accuracies);