KNN.c
#include "ELM.h"
#ifdef KNN
#include "KNN.h"
#include <stdio.h>
#include <math.h>
#include <stdbool.h> // for bool
#include <stdlib.h>  // qsort; to check that it works on STM32
#include <string.h>  // memset; to check that it works on STM32
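/* When DS_TEST is defined, a test driver presumably calls the model
   through one of these function pointers, selecting regression or
   classification at compile time. */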
#ifdef DS_TEST
#ifdef REGRESSION
float (*pRegress)(float X[]) = knn_regression;
#else
int (*pClassf)(float X[]) = knn_classification;
#endif
#endif
// A training point viewed as a candidate neighbour of the query.
struct neighbour{
    int id;      // index into X_train / y_train
    float score; // inverse Euclidean distance to the query (0 = pruned)
#ifdef REGRESSION
    float label; // cached y_train[id]
#endif
};
int struct_cmp_by_score_dec(const void *, const void *);
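/* Both predictors implement weighted k-NN: every training point is scored
   by the inverse of its Euclidean distance to the query, the K highest
   scores are kept, and each kept neighbour contributes its label with a
   weight equal to its score. */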
#ifndef REGRESSION
int knn_classification(float X[]) {
    // Weighted k-NN classification
    // https://www.geeksforgeeks.org/weighted-k-nn/
    struct neighbour neighbours[N_TRAIN]; // one entry per training point (stack-allocated)
    int j;
    for (j = 0; j < N_TRAIN; j++) {
        neighbours[j].id = j;
        float acc = 0;
        bool skip = false;
        int k;
        // Accumulate the squared Euclidean distance to training point j
        for (k = 0; k < N_FEATURE; k++) {
            acc += (X[k] - X_train[j][k]) * (X[k] - X_train[j][k]);
            if (acc > 1e7f) { // too far away: give it zero weight and stop early
                neighbours[j].score = 0;
                skip = true;
                break;
            }
        }
        if (!skip) {
            acc = sqrtf(acc);
            if (acc < 1e-8f) { // (near-)exact match: cap the weight instead of dividing by ~0
                neighbours[j].score = 1e8f;
            } else {
                neighbours[j].score = 1 / acc; // weight = inverse distance
            }
        }
    }
    // Sort by score so the K nearest (highest-weight) points come first
    qsort(neighbours, N_TRAIN, sizeof(struct neighbour), struct_cmp_by_score_dec);
    {
        int n;
        float scores[N_CLASS];
        memset(scores, 0, N_CLASS * sizeof(float));
        // Each of the K nearest neighbours votes for its class with its weight
        for (n = 0; n < K; n++) {
            scores[y_train[neighbours[n].id]] += neighbours[n].score;
        }
        float bestScore = 0;
        int bestClass = 0; // default to class 0 if every score is zero
        for (n = 0; n < N_CLASS; n++) {
            if (scores[n] > bestScore) {
                bestScore = scores[n];
                bestClass = n;
            }
        }
        return bestClass;
    }
}
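/* Minimal usage sketch, compiled only when the hypothetical KNN_DEMO flag
   is set (it is not part of the original build). The zero-filled query is
   a placeholder, not real data. */
#ifdef KNN_DEMO
int main(void)
{
    float query[N_FEATURE] = {0}; // placeholder feature vector
    printf("Predicted class: %d\n", knn_classification(query));
    return 0;
}
#endif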
#endif
#ifdef REGRESSION
float knn_regression(float X[]) {
    // Weighted k-NN regression
    // https://www.geeksforgeeks.org/weighted-k-nn/
    struct neighbour neighbours[N_TRAIN]; // one entry per training point (stack-allocated)
    int j;
    for (j = 0; j < N_TRAIN; j++) {
        neighbours[j].id = j;
        // Cache the target value; if needed, this could also serve classification
        neighbours[j].label = y_train[j];
        float acc = 0;
        bool skip = false;
        int k;
        // Accumulate the squared Euclidean distance to training point j
        for (k = 0; k < N_FEATURE; k++) {
            acc += (X[k] - X_train[j][k]) * (X[k] - X_train[j][k]);
            if (acc > 1e7f) { // too far away: give it zero weight and stop early
                neighbours[j].score = 0;
                skip = true;
                break;
            }
        }
        if (!skip) {
            acc = sqrtf(acc);
            if (acc < 1e-8f) { // (near-)exact match: cap the weight instead of dividing by ~0
                neighbours[j].score = 1e8f;
            } else {
                neighbours[j].score = 1 / acc; // weight = inverse distance
            }
        }
    }
    // Sort by score so the K nearest (highest-weight) points come first
    qsort(neighbours, N_TRAIN, sizeof(struct neighbour), struct_cmp_by_score_dec);
    {
        // Prediction is the score-weighted mean of the K nearest labels:
        // pred = sum(score_i * label_i) / sum(score_i)
        float totalScore = 0;
        float pred = 0;
        int n;
        for (n = 0; n < K; n++) {
            pred += neighbours[n].label * neighbours[n].score;
            totalScore += neighbours[n].score;
        }
        pred /= totalScore;
#ifdef MINMAX_NORMALIZATION
        pred = pred / S_Y; // map the prediction back to the original target scale
#elif defined(STANDARD_SCALING)
        pred = pred * S_Y + U_Y; // map the prediction back to the original target scale
#endif
        printf("Prediction: %f\n", pred);
        return pred;
    }
}
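/* Minimal usage sketch, compiled only when the hypothetical KNN_DEMO flag
   is set (it is not part of the original build). The zero-filled query is
   a placeholder, not real data. */
#ifdef KNN_DEMO
int main(void)
{
    float query[N_FEATURE] = {0}; // placeholder feature vector
    printf("Predicted value: %f\n", knn_regression(query));
    return 0;
}
#endif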
#endif
/* qsort comparison function: order neighbours by score, decreasing.
   Scores are compared directly rather than by casting a scaled
   difference to int, which can overflow for large scores. */
int struct_cmp_by_score_dec(const void *a, const void *b)
{
    const struct neighbour *ia = (const struct neighbour *)a;
    const struct neighbour *ib = (const struct neighbour *)b;
    if (ia->score > ib->score) return -1; // a sorts before b
    if (ib->score > ia->score) return 1;  // b sorts before a
    return 0;
}
#endif