forked from openvinotoolkit/openvino
-
Notifications
You must be signed in to change notification settings - Fork 0
/
plugin.hpp
80 lines (64 loc) · 4.22 KB
/
plugin.hpp
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
// Copyright (C) 2018-2023 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//
///////////////////////////////////////////////////////////////////////////////////////////////////
#pragma once
#include <list>
#include <map>
#include <mutex>
#include <string>
#include <vector>

#include <cpp_interfaces/interface/ie_internal_plugin_config.hpp>
#include <cpp_interfaces/interface/ie_iplugin_internal.hpp>

#include "common.hpp"
#include "utils/config.hpp"
#include "utils/log_util.hpp"
#ifdef MULTIUNITTEST
#define MOCKTESTMACRO virtual
#define MultiDevicePlugin MockMultiDevicePlugin
#else
#define MOCKTESTMACRO
#endif
namespace MultiDevicePlugin {
// Plugin front-end for the multi-device / auto-device Inference Engine plugin.
// Implements the IInferencePlugin interface (network loading, config and
// metric queries) and adds helpers for parsing, filtering and selecting the
// underlying hardware devices.
//
// MOCKTESTMACRO expands to `virtual` only under MULTIUNITTEST builds so the
// helper methods below can be overridden by mocks; in production builds it
// expands to nothing and the methods are non-virtual.
class MultiDeviceInferencePlugin : public InferenceEngine::IInferencePlugin {
public:
MultiDeviceInferencePlugin();
~MultiDeviceInferencePlugin() = default;
// Loads an already-parsed CNNNetwork with the given per-load config.
InferenceEngine::IExecutableNetworkInternal::Ptr LoadExeNetworkImpl(const InferenceEngine::CNNNetwork& network,
                                                                    const std::map<std::string, std::string>& config) override;
// Loads a network directly from a model file path (no pre-parsed network).
ov::SoPtr<InferenceEngine::IExecutableNetworkInternal> LoadNetwork(const std::string& modelPath,
                                                                   const std::map<std::string, std::string>& config) override;
// Stores plugin-level configuration (applies to subsequent loads).
void SetConfig(const std::map<std::string, std::string>& config) override;
// Returns the current value of a single config key.
InferenceEngine::Parameter GetConfig(const std::string& name, const std::map<std::string, InferenceEngine::Parameter> & options) const override;
// Reports which operations of `network` are supported under `config`.
InferenceEngine::QueryNetworkResult QueryNetwork(const InferenceEngine::CNNNetwork& network,
                                                 const std::map<std::string, std::string>& config) const override;
// Returns a plugin metric (supported metrics, device name, etc.).
InferenceEngine::Parameter GetMetric(const std::string& name,
                                     const std::map<std::string, InferenceEngine::Parameter>& options) const override;
// Parses a device-priority string (e.g. from the device request config)
// into per-device DeviceInformation entries.
MOCKTESTMACRO std::vector<MultiDevicePlugin::DeviceInformation> ParseMetaDevices(const std::string & devicesRequestsCfg,
                                                                   const std::map<std::string, std::string> & config) const;
// Builds the candidate device list string for the given config.
MOCKTESTMACRO std::string GetDeviceList(const std::map<std::string, std::string>& config) const;
// Filters `metaDevices` down to those usable for `networkPrecision`
// (defaults to FP32).
MOCKTESTMACRO std::list<DeviceInformation> GetValidDevice(const std::vector<DeviceInformation>& metaDevices,
                                                          const std::string& networkPrecision = METRIC_VALUE(FP32));
// Picks a single target device from the candidates for the given
// precision and load priority.
MOCKTESTMACRO DeviceInformation SelectDevice(const std::vector<DeviceInformation>& metaDevices,
                                             const std::string& networkPrecision = METRIC_VALUE(FP32),
                                             unsigned int priority = 0);
// Register/unregister a device under a priority bucket in _priorityMap
// (bookkeeping for SelectDevice; see the static members below).
void UnregisterPriority(const unsigned int& priority, const std::string& deviceName);
void RegisterPriority(const unsigned int& priority, const std::string& deviceName);
protected:
// Projects `config` onto the subset of keys supported by `deviceName`.
std::map<std::string, std::string> GetSupportedConfig(const std::map<std::string, std::string>& config,
                                                      const MultiDevicePlugin::DeviceName & deviceName) const;
private:
// Common implementation behind both LoadExeNetworkImpl and LoadNetwork:
// takes either a model path or a parsed network plus the merged config.
InferenceEngine::IExecutableNetworkInternal::Ptr LoadNetworkImpl(const std::string& modelPath,
                                                                  InferenceEngine::CNNNetwork network,
                                                                  const std::map<std::string, std::string>& config,
                                                                  const std::string &networkPrecision = METRIC_VALUE(FP32));
// Plugin-level configuration accumulated via SetConfig.
PluginConfig _pluginConfig;
// Removes devices disallowed by `config` from the candidate list.
std::vector<DeviceInformation> FilterDevice(const std::vector<DeviceInformation>& metaDevices,
                                            const std::map<std::string, std::string>& config);
// Removes devices that cannot run this particular network
// (NOTE(review): exact criteria live in the .cpp — not visible here).
std::vector<DeviceInformation> FilterDeviceByNetwork(const std::vector<DeviceInformation>& metaDevices,
                                                     InferenceEngine::CNNNetwork network);
// Tag prepended to log messages for this plugin instance.
std::string GetLogTag() const noexcept;
// Shared across all plugin instances; _mtx presumably guards _priorityMap
// (confirm against Register/UnregisterPriority in the .cpp).
static std::mutex _mtx;
static std::map<unsigned int, std::list<std::string>> _priorityMap;
std::string _LogTag;
};
} // namespace MultiDevicePlugin