micro.cc
#include <stdio.h>

#include "tensorflow/lite/micro/all_ops_resolver.h"
#include "tensorflow/lite/micro/micro_interpreter.h"
#include "tensorflow/lite/schema/schema_generated.h"

#include "model_fc.h"

// Working memory for the interpreter's tensors. 8 KB is plenty for this
// small fully-connected model.
const int tensor_arena_size = 8000;
uint8_t tensor_arena[tensor_arena_size];

int main() {
  // Map the flatbuffer model compiled into model_fc.h.
  const tflite::Model* model = tflite::GetModel(model_fc_tflite);
  if (model->version() != TFLITE_SCHEMA_VERSION) {
    printf("Model provided is schema version %d not equal "
           "to supported version %d.\n",
           static_cast<int>(model->version()), TFLITE_SCHEMA_VERSION);
    return 1;
  }

  // Register every op implementation; a MicroMutableOpResolver with only the
  // ops this model needs would use less flash.
  tflite::AllOpsResolver resolver;
  tflite::MicroInterpreter interpreter(model, resolver, tensor_arena,
                                       tensor_arena_size);

  // Carve the input, output, and intermediate tensors out of the arena.
  TfLiteStatus allocate_status = interpreter.AllocateTensors();
  if (allocate_status != kTfLiteOk) {
    printf("unable to allocate tensors\n");
    return 1;
  }

  TfLiteTensor* model_input = interpreter.input(0);
  if (model_input == nullptr) {
    printf("unable to allocate input\n");
    return 1;
  }
  // The model expects a rank-2 float32 input tensor.
  if (model_input->dims->size != 2 || model_input->type != kTfLiteFloat32) {
    printf("input mismatch\n");
    return 1;
  }

  // Fill in the two input features and run inference.
  model_input->data.f[0] = 700.0f;
  model_input->data.f[1] = 7.3f;
  TfLiteStatus invoke_status = interpreter.Invoke();
  if (invoke_status != kTfLiteOk) {
    printf("Invoke error: status %d\n", invoke_status);
    return 1;
  }

  // Read back the single float result.
  TfLiteTensor* output = interpreter.output(0);
  float y = output->data.f[0];
  printf("output %f\n", (double)y);
  return 0;
}
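
The program assumes that model_fc.h exposes the converted .tflite flatbuffer as a C array named model_fc_tflite. A header of that shape is typically generated from the model file with a tool such as xxd -i; the sketch below shows what this program expects, with the file name, guard name, and length symbol being assumptions based on the identifier used above, not the repository's actual generated file.

// model_fc.h -- hypothetical sketch of the model-data header micro.cc includes.
// Assumption: the array definition (the raw bytes of model_fc.tflite, e.g. as
// emitted by `xxd -i model_fc.tflite`) lives in this header or in a companion
// generated source file; these declarations make it visible to micro.cc.
#ifndef MODEL_FC_H_
#define MODEL_FC_H_

extern unsigned char model_fc_tflite[];
extern unsigned int model_fc_tflite_len;

#endif  // MODEL_FC_H_

Only the array name matters to micro.cc; the byte contents and length are whatever the converter produced for the trained fully-connected model.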