ie_core.hpp
Go to the documentation of this file.
1 // Copyright (C) 2018-2020 Intel Corporation
2 // SPDX-License-Identifier: Apache-2.0
3 //
4 
5 /**
6  * @brief This is a header file for the Inference Engine Core class C++ API
7  *
8  * @file ie_core.hpp
9  */
10 #pragma once
11 
12 #include <map>
13 #include <memory>
14 #include <string>
15 #include <vector>
16 
17 #include "ie_version.hpp"
18 #include "ie_extension.h"
19 #include "ie_remote_context.hpp"
20 #include "ie_executable_network.hpp"
21 
22 namespace InferenceEngine {
23 
24 /**
25  * @brief This class represents Inference Engine Core entity.
26  *
27  * It can throw exceptions safely for the application, where it is properly handled.
28  */
29 class INFERENCE_ENGINE_API_CLASS(Core) {
30  class Impl;
31  std::shared_ptr<Impl> _impl;
32 
33 public:
34  /** @brief Constructs Inference Engine Core instance using XML configuration file with
35  * plugins description.
36  *
37  * See RegisterPlugins for more details.
38  *
39  * @param xmlConfigFile A path to .xml file with plugins to load from. If XML configuration file is not specified,
40  * then default Inference Engine plugins are loaded from the default plugin.xml file.
41  */
42  explicit Core(const std::string& xmlConfigFile = {});
43 
44  /**
45  * @brief Returns plugins version information
46  *
47  * @param deviceName Device name to identify plugin
48  * @return A vector of versions
49  */
50  std::map<std::string, Version> GetVersions(const std::string& deviceName) const;
51 
52 #ifdef ENABLE_UNICODE_PATH_SUPPORT
53  /**
54  * @brief Reads models from IR and ONNX formats
55  * @param modelPath path to model
56  * @param binPath path to data file
57  * For IR format (*.bin):
58  * * if path is empty, will try to read bin file with the same name as xml and
59  * * if bin file with the same name was not found, will load IR without weights.
60  * For ONNX format (*.onnx or *.prototxt):
61  * * binPath parameter is not used.
62  * @return CNNNetwork
63  */
64  CNNNetwork ReadNetwork(const std::wstring& modelPath, const std::wstring& binPath = {}) const;
65 #endif
66 
67  /**
68  * @brief Reads models from IR and ONNX formats
69  * @param modelPath path to model
70  * @param binPath path to data file
71  * For IR format (*.bin):
72  * * if path is empty, will try to read bin file with the same name as xml and
73  * * if bin file with the same name was not found, will load IR without weights.
74  * For ONNX format (*.onnx or *.prototxt):
75  * * binPath parameter is not used.
76  * @return CNNNetwork
77  */
78  CNNNetwork ReadNetwork(const std::string& modelPath, const std::string& binPath = {}) const;
79  /**
80  * @brief Reads models from IR and ONNX formats
81  * @param model string with model in IR or ONNX format
82  * @param weights shared pointer to constant blob with weights
83  * Reading ONNX models doesn't support loading weights from data blobs.
84  * If you are using an ONNX model with external data files, please use the
85  * `InferenceEngine::Core::ReadNetwork(const std::string& model, const Blob::CPtr& weights) const`
86  * function overload which takes a filesystem path to the model.
87  * For ONNX case the second parameter should contain empty blob.
88  * @return CNNNetwork
89  */
90  CNNNetwork ReadNetwork(const std::string& model, const Blob::CPtr& weights) const;
91 
92  /**
93  * @brief Creates an executable network from a network object.
94  *
95  * Users can create as many networks as they need and use
96  * them simultaneously (up to the limitation of the hardware resources)
97  *
98  * @param network CNNNetwork object acquired from Core::ReadNetwork
99  * @param deviceName Name of device to load network to
100  * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
101  * operation
102  * @return An executable network reference
103  */
105  const CNNNetwork& network, const std::string& deviceName,
106  const std::map<std::string, std::string>& config = {});
107 
108  /**
109  * @brief Registers extension
110  * @param extension Pointer to already loaded extension
111  */
112  void AddExtension(const IExtensionPtr& extension);
113 
114  /**
115  * @brief Creates an executable network from a network object within a specified remote context.
116  * @param network CNNNetwork object acquired from Core::ReadNetwork
117  * @param context Pointer to RemoteContext object
118  * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
119  * operation
120  * @return An executable network object
121  */
123  const CNNNetwork& network, RemoteContext::Ptr context,
124  const std::map<std::string, std::string>& config = {});
125 
126  /**
127  * @brief Registers extension for the specified plugin
128  *
129  * @param extension Pointer to already loaded extension
130  * @param deviceName Device name to identify plugin to add an executable extension
131  */
132  void AddExtension(IExtensionPtr extension, const std::string& deviceName);
133 
134  /**
135  * @brief Creates an executable network from a previously exported network
136  *
137  * @param deviceName Name of device load executable network on
138  * @param modelFileName Path to the location of the exported file
139  * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
140  * operation*
141  * @return An executable network reference
142  */
144  const std::string& modelFileName, const std::string& deviceName,
145  const std::map<std::string, std::string>& config = {});
146 
147  /**
148  * @brief Creates an executable network from a previously exported network
149  * @param deviceName Name of device load executable network on
150  * @param networkModel network model stream
151  * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
152  * operation*
153  * @return An executable network reference
154  */
155  ExecutableNetwork ImportNetwork(std::istream& networkModel, const std::string& deviceName = {},
156  const std::map<std::string, std::string>& config = {});
157 
158  /**
159  * @brief Creates an executable network from a previously exported network within a specified
160  * remote context.
161  *
162  * @param networkModel Network model stream
163  * @param context Pointer to RemoteContext object
164  * @param config Optional map of pairs: (config parameter name, config parameter value) relevant only for this load
165  * operation
166  * @return An executable network reference
167  */
168  ExecutableNetwork ImportNetwork(std::istream& networkModel,
169  const RemoteContext::Ptr& context,
170  const std::map<std::string, std::string>& config = {});
171 
172  /**
173  * @brief Query device if it supports specified network with specified configuration
174  *
175  * @param deviceName A name of a device to query
176  * @param network Network object to query
177  * @param config Optional map of pairs: (config parameter name, config parameter value)
178  * @return An object containing a map of pairs a layer name -> a device name supporting this layer.
179  */
181  const CNNNetwork& network, const std::string& deviceName,
182  const std::map<std::string, std::string>& config = {}) const;
183 
184  /**
185  * @brief Sets configuration for device, acceptable keys can be found in ie_plugin_config.hpp
186  *
187  * @param deviceName An optional name of a device. If device name is not specified, the config is set for all the
188  * registered devices.
189  *
190  * @param config Map of pairs: (config parameter name, config parameter value)
191  */
192  void SetConfig(const std::map<std::string, std::string>& config, const std::string& deviceName = {});
193 
194  /**
195  * @brief Gets configuration dedicated to device behaviour.
196  *
197  * The method is targeted to extract information which can be set via SetConfig method.
198  *
199  * @param deviceName - A name of a device to get a configuration value.
200  * @param name - value of config corresponding to config key.
201  * @return Value of config corresponding to config key.
202  */
203  Parameter GetConfig(const std::string& deviceName, const std::string& name) const;
204 
205  /**
206  * @brief Gets general runtime metric for dedicated hardware.
207  *
208  * The method is needed to request common device properties
209  * which are executable network agnostic. It can be device name, temperature, other devices-specific values.
210  *
211  * @param deviceName - A name of a device to get a metric value.
212  * @param name - metric name to request.
213  * @return Metric value corresponding to metric key.
214  */
215  Parameter GetMetric(const std::string& deviceName, const std::string& name) const;
216 
217  /**
218  * @brief Returns devices available for neural networks inference
219  *
220  * @return A vector of devices. The devices are returned as { CPU, FPGA.0, FPGA.1, MYRIAD }
221  If there more than one device of specific type, they are enumerated with .# suffix.
222  */
223  std::vector<std::string> GetAvailableDevices() const;
224 
225  /**
226  * @brief Register new device and plugin which implement this device inside Inference Engine.
227  *
228  * @param pluginName A name of plugin. Depending on platform pluginName is wrapped with shared library suffix and
229  * prefix to identify library full name
230  *
231  * @param deviceName A device name to register plugin for. If device name is not specified, then it's taken from
232  * plugin itself.
233  */
234  void RegisterPlugin(const std::string& pluginName, const std::string& deviceName);
235 
236  /**
237  * @brief Unloads previously loaded plugin with a specified name from Inference Engine
238  * The method is needed to remove plugin instance and free its resources. If plugin for a
239  * specified device has not been created before, the method throws an exception.
240  *
241  * @param deviceName Device name identifying plugin to remove from Inference Engine
242  */
243  void UnregisterPlugin(const std::string& deviceName);
244 
245  /** @brief Registers plugin to Inference Engine Core instance using XML configuration file with
246  * plugins description.
247  *
248  * XML file has the following structure:
249  *
250  * ```xml
251  * <ie>
252  * <plugins>
253  * <plugin name="" location="">
254  * <extensions>
255  * <extension location=""/>
256  * </extensions>
257  * <properties>
258  * <property key="" value=""/>
259  * </properties>
260  * </plugin>
261  * </plugins>
262  * </ie>
263  * ```
264  *
265  * - `name` identifies name of device enabled by plugin
266  * - `location` specifies absolute path to dynamic library with plugin. A path can also be relative to inference
267  * engine shared library. It allows to have common config for different systems with different configurations.
268  * - Properties are set to plugin via the `SetConfig` method.
269  * - Extensions are set to plugin via the `AddExtension` method.
270  *
271  * @param xmlConfigFile A path to .xml file with plugins to register.
272  */
273  void RegisterPlugins(const std::string& xmlConfigFile);
274 
275  /**
276  * @brief Create a new shared context object on specified accelerator device
277  * using specified plugin-specific low level device API parameters (device handle, pointer, etc.)
278  * @param deviceName Name of a device to create new shared context on.
279  * @param params Map of device-specific shared context parameters.
280  * @return A shared pointer to a created remote context.
281  */
282  RemoteContext::Ptr CreateContext(const std::string& deviceName, const ParamMap& params);
283 
284  /**
285  * @brief Get a pointer to default(plugin-supplied) shared context object for specified accelerator device.
286  * @param deviceName - A name of a device to get create shared context from.
287  * @return A shared pointer to a default remote context.
288  */
289  RemoteContext::Ptr GetDefaultContext(const std::string& deviceName);
290 };
291 } // namespace InferenceEngine
InferenceEngine::Core::ReadNetwork
CNNNetwork ReadNetwork(const std::string &modelPath, const std::string &binPath={}) const
Reads models from IR and ONNX formats.
InferenceEngine::Core::GetDefaultContext
RemoteContext::Ptr GetDefaultContext(const std::string &deviceName)
Get a pointer to default(plugin-supplied) shared context object for specified accelerator device.
InferenceEngine::Core::ImportNetwork
ExecutableNetwork ImportNetwork(const std::string &modelFileName, const std::string &deviceName, const std::map< std::string, std::string > &config={})
Creates an executable network from a previously exported network.
InferenceEngine::Core::CreateContext
RemoteContext::Ptr CreateContext(const std::string &deviceName, const ParamMap &params)
Create a new shared context object on specified accelerator device using specified plugin-specific low level device API parameters (device handle, pointer, etc.)
InferenceEngine::Core::RegisterPlugin
void RegisterPlugin(const std::string &pluginName, const std::string &deviceName)
Register new device and plugin which implement this device inside Inference Engine.
InferenceEngine::Blob::CPtr
std::shared_ptr< const Blob > CPtr
A smart pointer to the const Blob object.
Definition: ie_blob.h:48
InferenceEngine::CNNNetwork
This class contains all the information about the Neural Network and the related binary information.
Definition: ie_cnn_network.h:36
InferenceEngine::Parameter
This class represents an object to work with different parameters.
Definition: ie_parameter.hpp:37
InferenceEngine::Core::UnregisterPlugin
void UnregisterPlugin(const std::string &deviceName)
Unloads previously loaded plugin with a specified name from Inference Engine. The method is needed to remove plugin instance and free its resources.
InferenceEngine::Core::QueryNetwork
QueryNetworkResult QueryNetwork(const CNNNetwork &network, const std::string &deviceName, const std::map< std::string, std::string > &config={}) const
Query device if it supports specified network with specified configuration.
InferenceEngine::Core::ReadNetwork
CNNNetwork ReadNetwork(const std::string &model, const Blob::CPtr &weights) const
Reads models from IR and ONNX formats.
InferenceEngine::Core::AddExtension
void AddExtension(IExtensionPtr extension, const std::string &deviceName)
Registers extension for the specified plugin.
InferenceEngine::Core::AddExtension
void AddExtension(const IExtensionPtr &extension)
Registers extension.
InferenceEngine::IExtensionPtr
std::shared_ptr< IExtension > IExtensionPtr
A shared pointer to a IExtension interface.
Definition: ie_iextension.h:195
ie_executable_network.hpp
A header file that provides wrapper classes for IExecutableNetwork.
InferenceEngine::ParamMap
std::map< std::string, Parameter > ParamMap
An std::map object containing low-level object parameters of classes that are derived from RemoteBlob...
Definition: ie_remote_context.hpp:26
InferenceEngine::Core::Core
Core(const std::string &xmlConfigFile={})
Constructs Inference Engine Core instance using XML configuration file with plugins description.
InferenceEngine::RemoteContext::Ptr
std::shared_ptr< RemoteContext > Ptr
A smart pointer to the RemoteContext object.
Definition: ie_remote_context.hpp:99
InferenceEngine::Core::GetConfig
Parameter GetConfig(const std::string &deviceName, const std::string &name) const
Gets configuration dedicated to device behaviour.
InferenceEngine::ExecutableNetwork
wrapper over IExecutableNetwork
Definition: ie_executable_network.hpp:30
ie_extension.h
A header file that defines a wrapper class for handling extension instantiation and releasing resources.
InferenceEngine::Core::ImportNetwork
ExecutableNetwork ImportNetwork(std::istream &networkModel, const RemoteContext::Ptr &context, const std::map< std::string, std::string > &config={})
Creates an executable network from a previously exported network within a specified remote context.
InferenceEngine::Core::RegisterPlugins
void RegisterPlugins(const std::string &xmlConfigFile)
Registers plugin to Inference Engine Core instance using XML configuration file with plugins description.
InferenceEngine::Core::GetAvailableDevices
std::vector< std::string > GetAvailableDevices() const
Returns devices available for neural networks inference.
ie_remote_context.hpp
This is a header file for the IE RemoteContext and RemoteBlob classes.
InferenceEngine::Core::SetConfig
void SetConfig(const std::map< std::string, std::string > &config, const std::string &deviceName={})
Sets configuration for device, acceptable keys can be found in ie_plugin_config.hpp.
InferenceEngine::Core::GetVersions
std::map< std::string, Version > GetVersions(const std::string &deviceName) const
Returns plugins version information.
InferenceEngine::QueryNetworkResult
Response structure encapsulating information about supported layer.
Definition: ie_common.h:255
InferenceEngine::Core::GetMetric
Parameter GetMetric(const std::string &deviceName, const std::string &name) const
Gets general runtime metric for dedicated hardware.
InferenceEngine::Core::LoadNetwork
ExecutableNetwork LoadNetwork(const CNNNetwork &network, RemoteContext::Ptr context, const std::map< std::string, std::string > &config={})
Creates an executable network from a network object within a specified remote context.
InferenceEngine::Core::LoadNetwork
ExecutableNetwork LoadNetwork(const CNNNetwork &network, const std::string &deviceName, const std::map< std::string, std::string > &config={})
Creates an executable network from a network object.
ie_version.hpp
A header file that provides versioning information for the inference engine shared library.
InferenceEngine::Core::ImportNetwork
ExecutableNetwork ImportNetwork(std::istream &networkModel, const std::string &deviceName={}, const std::map< std::string, std::string > &config={})
Creates an executable network from a previously exported network.