# MediaPipe graph that performs image classification with Tengine.
#
# Images on CPU come into the graph and classification results come out.
input_stream: "input_frame"
output_stream: "classifications"
# Transforms the input image on CPU to a 256x256 image. To scale the image, by
# default it uses the STRETCH scale mode that maps the entire input image to the
# entire transformed image. As a result, image aspect ratio may be changed and
# objects in the image may be deformed (stretched or squeezed), but the
# classification model used in this graph is agnostic to that deformation.
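# (The 256x256 output size is assumed here to match the input resolution
# expected by resnet18_uint8.tmfile; adjust it if the model was converted with
# a different input shape.)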
node {
  calculator: "ImageTransformationCalculator"
  input_stream: "IMAGE:input_frame"
  output_stream: "IMAGE:transformed_input_frame"
  node_options: {
    [type.googleapis.com/mediapipe.ImageTransformationCalculatorOptions] {
      output_width: 256
      output_height: 256
    }
  }
}
# Converts the transformed input image on CPU into the array format consumed by
# the Tengine inference calculator, normalizing each channel with the mean and
# scale values configured below.
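# (The mean/scale values below look like the usual ImageNet preprocessing
# constants, i.e. per-channel means of roughly {123.68, 116.28, 103.53} and
# scales of roughly 1/(255 * std); the converter presumably computes
# normalized = (pixel - mean) * scale for each channel. This reading is an
# assumption based on the numbers, not stated elsewhere in this file.)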
node {
  calculator: "TengineConverterCalculator"
  input_stream: "IMAGE:transformed_input_frame"
  output_stream: "ARRAYS:image_tensor"
  node_options: {
    [type.googleapis.com/mediapipe.TengineConverterCalculatorOptions] {
      tensor_mean: {val1:123.7 val2:116.3 val3:103.5}
      tensor_scale: {val1:0.017124754 val2:0.017507003 val3:0.017429194}
    }
  }
}
# Runs the Tengine model resnet18_uint8.tmfile, which takes an image tensor and
# outputs a tensor of classification scores (one score per class), along with
# the output tensor shapes and quantization parameters.
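# (In the options below, data_type "uint8" matches the quantized model, and
# tengine_backend "timvx" selects Tengine's TIM-VX backend, which is typically
# used to offload inference to a VeriSilicon NPU; verify this for your target
# device and change the field if running on other hardware.)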
node {
  calculator: "TengineInferenceCalculator"
  input_stream: "ARRAYS:image_tensor"
  output_stream: "ARRAYS:classification_tensors"
  output_stream: "TENSOR_SHAPE:tensor_shapes"
  output_stream: "QUANT_PARAM:quant_param"
  node_options: {
    [type.googleapis.com/mediapipe.TengineInferenceCalculatorOptions] {
      model_path: "../models/resnet18_uint8.tmfile"
      data_type: "uint8"
      output_num: 1
      max_dim: 4
      tengine_backend: "timvx"
    }
  }
}
# Decodes the score tensors produced by the Tengine model into a list of
# classifications, using the tensor shapes and quantization parameters from the
# inference calculator to interpret the uint8 output.
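# (Dequantization of the uint8 scores presumably follows the standard affine
# scheme, real_value = scale * (quantized_value - zero_point), using the
# parameters carried on the QUANT_PARAM stream; this is the usual convention,
# not something specified in this file.)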
node {
  calculator: "TengineTensorsToClassificationsCalculator"
  input_stream: "ARRAYS:classification_tensors"
  input_stream: "TENSOR_SHAPE:tensor_shapes"
  input_stream: "QUANT_PARAM:quant_param"
  output_stream: "CLASSIFICATION:classifications"
  node_options: {
    [type.googleapis.com/mediapipe.TengineTensorsToClassificationsCalculatorOptions] {
      data_type: "uint8"
    }
  }
}