Added common.hpp file with aliases (openvinotoolkit#7158)
apankratovantonp authored and mryzhov committed Aug 23, 2021
1 parent d8704a1 commit 4d0c11b
Showing 3 changed files with 83 additions and 67 deletions.
@@ -0,0 +1,23 @@
// Copyright (C) 2018-2021 Intel Corporation
// SPDX-License-Identifier: Apache-2.0
//

/**
* @brief This is a header file for the OpenVINO Runtime common aliases that depend only on the external API
*
* @file openvino/runtime/common.hpp
*/
#pragma once

#include <map>
#include <string>

namespace ov {
namespace ie = InferenceEngine;
namespace runtime {
/**
* @brief This type of map is commonly used to pass set of parameters
*/
using ConfigMap = std::map<std::string, std::string>;
} // namespace runtime
} // namespace ov
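For illustration only, a minimal sketch of what the aliases in the new openvino/runtime/common.hpp provide; the config key and value strings are made-up placeholders, not documented OpenVINO options:

#include <map>
#include <string>

#include <openvino/runtime/common.hpp>  // assumed installed include path for the header added above

int main() {
    // ConfigMap is plain std::map<std::string, std::string>, so the usual brace-init works.
    ov::runtime::ConfigMap config = {{"SOME_CONFIG_KEY", "SOME_VALUE"}};  // placeholder key/value
    // ov::ie is only a namespace alias: ov::ie::Version and InferenceEngine::Version are the same type.
    return static_cast<int>(config.size());
}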
@@ -15,6 +15,7 @@
#include <string>
#include <vector>

#include "common.hpp"
#include "cpp/ie_executable_network.hpp"
#include "ie_plugin_config.hpp"
#include "ie_version.hpp"
@@ -57,7 +58,7 @@ class INFERENCE_ENGINE_API_CLASS(Core) {
* @param deviceName Device name to identify plugin
* @return A vector of versions
*/
std::map<std::string, InferenceEngine::Version> get_versions(const std::string& deviceName) const;
std::map<std::string, ie::Version> get_versions(const std::string& deviceName) const;

#ifdef ENABLE_UNICODE_PATH_SUPPORT
/**
@@ -101,7 +102,7 @@ class INFERENCE_ENGINE_API_CLASS(Core) {
* @return Function
*/
std::shared_ptr<ov::Function> read_model(const std::string& model,
const std::shared_ptr<const InferenceEngine::Blob>& weights) const;
const std::shared_ptr<const ie::Blob>& weights) const;

/**
* @brief Creates an executable network from a network object.
@@ -115,9 +116,9 @@ class INFERENCE_ENGINE_API_CLASS(Core) {
* operation
* @return An executable network reference
*/
InferenceEngine::ExecutableNetwork compile_model(const std::shared_ptr<const ov::Function>& network,
const std::string& deviceName,
const std::map<std::string, std::string>& config = {});
ie::ExecutableNetwork compile_model(const std::shared_ptr<const ov::Function>& network,
const std::string& deviceName,
const ConfigMap& config = {});

/**
* @brief Reads model and creates an executable network from IR or ONNX file
@@ -132,9 +133,9 @@ class INFERENCE_ENGINE_API_CLASS(Core) {
*
* @return An executable network reference
*/
InferenceEngine::ExecutableNetwork compile_model(const std::string& modelPath,
const std::string& deviceName,
const std::map<std::string, std::string>& config = {});
ie::ExecutableNetwork compile_model(const std::string& modelPath,
const std::string& deviceName,
const ConfigMap& config = {});

/**
* @brief Creates an executable network from a network object within a specified remote context.
@@ -144,15 +145,15 @@ class INFERENCE_ENGINE_API_CLASS(Core) {
* operation
* @return An executable network object
*/
InferenceEngine::ExecutableNetwork compile_model(const std::shared_ptr<const ov::Function>& network,
const std::shared_ptr<InferenceEngine::RemoteContext>& context,
const std::map<std::string, std::string>& config = {});
ie::ExecutableNetwork compile_model(const std::shared_ptr<const ov::Function>& network,
const std::shared_ptr<ie::RemoteContext>& context,
const ConfigMap& config = {});

/**
* @brief Registers extension
* @param extension Pointer to already loaded extension
*/
void add_extension(const std::shared_ptr<InferenceEngine::IExtension>& extension);
void add_extension(const std::shared_ptr<ie::IExtension>& extension);

/**
* @brief Creates an executable network from a previously exported network
@@ -162,9 +163,9 @@ class INFERENCE_ENGINE_API_CLASS(Core) {
* operation*
* @return An executable network reference
*/
InferenceEngine::ExecutableNetwork import_model(std::istream& networkModel,
const std::string& deviceName,
const std::map<std::string, std::string>& config = {});
ie::ExecutableNetwork import_model(std::istream& networkModel,
const std::string& deviceName,
const ConfigMap& config = {});

/**
* @brief Creates an executable network from a previously exported network within a specified
@@ -176,9 +177,9 @@ class INFERENCE_ENGINE_API_CLASS(Core) {
* operation
* @return An executable network reference
*/
InferenceEngine::ExecutableNetwork import_model(std::istream& networkModel,
const std::shared_ptr<InferenceEngine::RemoteContext>& context,
const std::map<std::string, std::string>& config = {});
ie::ExecutableNetwork import_model(std::istream& networkModel,
const std::shared_ptr<ie::RemoteContext>& context,
const ConfigMap& config = {});

/**
* @brief Query device if it supports specified network with specified configuration
@@ -188,9 +189,9 @@ class INFERENCE_ENGINE_API_CLASS(Core) {
* @param config Optional map of pairs: (config parameter name, config parameter value)
* @return An object containing a map of pairs a layer name -> a device name supporting this layer.
*/
InferenceEngine::QueryNetworkResult query_model(const std::shared_ptr<const ov::Function>& network,
const std::string& deviceName,
const std::map<std::string, std::string>& config = {}) const;
ie::QueryNetworkResult query_model(const std::shared_ptr<const ov::Function>& network,
const std::string& deviceName,
const ConfigMap& config = {}) const;

/**
* @brief Sets configuration for device, acceptable keys can be found in ie_plugin_config.hpp
@@ -200,7 +201,7 @@ class INFERENCE_ENGINE_API_CLASS(Core) {
*
* @param config Map of pairs: (config parameter name, config parameter value)
*/
void set_config(const std::map<std::string, std::string>& config, const std::string& deviceName = {});
void set_config(const ConfigMap& config, const std::string& deviceName = {});

/**
* @brief Gets configuration dedicated to device behaviour.
@@ -211,7 +212,7 @@ class INFERENCE_ENGINE_API_CLASS(Core) {
* @param name - config key.
* @return Value of config corresponding to config key.
*/
InferenceEngine::Parameter get_config(const std::string& deviceName, const std::string& name) const;
ie::Parameter get_config(const std::string& deviceName, const std::string& name) const;

/**
* @brief Gets general runtime metric for dedicated hardware.
@@ -223,7 +224,7 @@ class INFERENCE_ENGINE_API_CLASS(Core) {
* @param name - metric name to request.
* @return Metric value corresponding to metric key.
*/
InferenceEngine::Parameter get_metric(const std::string& deviceName, const std::string& name) const;
ie::Parameter get_metric(const std::string& deviceName, const std::string& name) const;

/**
* @brief Returns devices available for neural networks inference
@@ -290,15 +291,14 @@ class INFERENCE_ENGINE_API_CLASS(Core) {
* @param params Map of device-specific shared context parameters.
* @return A shared pointer to a created remote context.
*/
std::shared_ptr<InferenceEngine::RemoteContext> create_context(const std::string& deviceName,
const InferenceEngine::ParamMap& params);
std::shared_ptr<ie::RemoteContext> create_context(const std::string& deviceName, const ie::ParamMap& params);

/**
* @brief Get a pointer to default(plugin-supplied) shared context object for specified accelerator device.
* @param deviceName - A name of a device to get create shared context from.
* @return A shared pointer to a default remote context.
*/
std::shared_ptr<InferenceEngine::RemoteContext> get_default_context(const std::string& deviceName);
std::shared_ptr<ie::RemoteContext> get_default_context(const std::string& deviceName);
};
} // namespace runtime
} // namespace ov
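A hedged usage sketch of the snake_case ov::runtime::Core API declared above; the include path, model file names, device name, and config entry are placeholders or assumptions, not taken from this commit:

#include <openvino/runtime/core.hpp>  // assumed location of the Core header modified above

int main() {
    ov::runtime::Core core;  // assumes the xmlConfigFile constructor argument defaults to an empty path
    auto function = core.read_model("model.xml", "model.bin");  // placeholder IR paths
    auto exec = core.compile_model(function, "CPU",              // example device name
                                   ov::runtime::ConfigMap{{"SOME_KEY", "SOME_VALUE"}});  // placeholder config
    (void)exec;
    return 0;
}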
73 changes: 33 additions & 40 deletions inference-engine/src/inference_engine/src/ie_core.cpp
@@ -1218,7 +1218,7 @@ Core::Core(const std::string& xmlConfigFile) {
register_plugins(core_detail::parseXmlConfig(xmlConfigFile));
}

std::map<std::string, InferenceEngine::Version> Core::get_versions(const std::string& deviceName) const {
std::map<std::string, ie::Version> Core::get_versions(const std::string& deviceName) const {
return _impl->GetVersions(deviceName);
}

@@ -1232,49 +1232,45 @@ std::shared_ptr<ngraph::Function> Core::read_model(const std::wstring& modelPath
std::shared_ptr<ngraph::Function> Core::read_model(const std::string& modelPath, const std::string& binPath) const {
return _impl->ReadNetwork(modelPath, binPath).getFunction();
}
std::shared_ptr<ngraph::Function> Core::read_model(const std::string& model,
const InferenceEngine::Blob::CPtr& weights) const {
std::shared_ptr<ngraph::Function> Core::read_model(const std::string& model, const ie::Blob::CPtr& weights) const {
return _impl->ReadNetwork(model, weights).getFunction();
}
InferenceEngine::ExecutableNetwork Core::compile_model(const std::shared_ptr<const ngraph::Function>& network,
const std::string& deviceName,
const std::map<std::string, std::string>& config) {
auto exec = _impl->LoadNetwork(InferenceEngine::CNNNetwork(std::const_pointer_cast<ngraph::Function>(network)),
deviceName,
config);
ie::ExecutableNetwork Core::compile_model(const std::shared_ptr<const ngraph::Function>& network,
const std::string& deviceName,
const ConfigMap& config) {
auto exec =
_impl->LoadNetwork(ie::CNNNetwork(std::const_pointer_cast<ngraph::Function>(network)), deviceName, config);
return {exec, exec};
}
InferenceEngine::ExecutableNetwork Core::compile_model(const std::string& modelPath,
const std::string& deviceName,
const std::map<std::string, std::string>& config) {
ie::ExecutableNetwork Core::compile_model(const std::string& modelPath,
const std::string& deviceName,
const ConfigMap& config) {
auto exec = _impl->LoadNetwork(modelPath, deviceName, config);
return {exec, exec};
}

InferenceEngine::ExecutableNetwork Core::compile_model(const std::shared_ptr<const ngraph::Function>& network,
const InferenceEngine::RemoteContext::Ptr& context,
const std::map<std::string, std::string>& config) {
auto exec = _impl->LoadNetwork(InferenceEngine::CNNNetwork(std::const_pointer_cast<ngraph::Function>(network)),
context,
config);
ie::ExecutableNetwork Core::compile_model(const std::shared_ptr<const ngraph::Function>& network,
const ie::RemoteContext::Ptr& context,
const ConfigMap& config) {
auto exec = _impl->LoadNetwork(ie::CNNNetwork(std::const_pointer_cast<ngraph::Function>(network)), context, config);
return {exec, exec};
}

void Core::add_extension(const InferenceEngine::IExtensionPtr& extension) {
void Core::add_extension(const ie::IExtensionPtr& extension) {
_impl->AddExtension(extension);
}

InferenceEngine::ExecutableNetwork Core::import_model(std::istream& networkModel,
const std::string& deviceName,
const std::map<std::string, std::string>& config) {
ie::ExecutableNetwork Core::import_model(std::istream& networkModel,
const std::string& deviceName,
const ConfigMap& config) {
OV_ITT_SCOPED_TASK(ov::itt::domains::IE, "Core::import_model");
auto exec = _impl->ImportNetwork(networkModel, deviceName, config);
return {exec, exec};
}

InferenceEngine::ExecutableNetwork Core::import_model(std::istream& networkModel,
const InferenceEngine::RemoteContext::Ptr& context,
const std::map<std::string, std::string>& config) {
ie::ExecutableNetwork Core::import_model(std::istream& networkModel,
const ie::RemoteContext::Ptr& context,
const ConfigMap& config) {
OV_ITT_SCOPED_TASK(ov::itt::domains::IE, "Core::import_model");

using ExportMagic = std::array<char, 4>;
@@ -1296,14 +1292,12 @@ InferenceEngine::ExecutableNetwork Core::import_model(std::istream& networkModel
return {exec, exec};
}

InferenceEngine::QueryNetworkResult Core::query_model(const std::shared_ptr<const ngraph::Function>& network,
const std::string& deviceName,
const std::map<std::string, std::string>& config) const {
return _impl->QueryNetwork(InferenceEngine::CNNNetwork(std::const_pointer_cast<ngraph::Function>(network)),
deviceName,
config);
ie::QueryNetworkResult Core::query_model(const std::shared_ptr<const ngraph::Function>& network,
const std::string& deviceName,
const ConfigMap& config) const {
return _impl->QueryNetwork(ie::CNNNetwork(std::const_pointer_cast<ngraph::Function>(network)), deviceName, config);
}
void Core::set_config(const std::map<std::string, std::string>& config, const std::string& deviceName) {
void Core::set_config(const ConfigMap& config, const std::string& deviceName) {
// HETERO case
if (deviceName.find("HETERO:") == 0) {
IE_THROW() << "SetConfig is supported only for HETERO itself (without devices). "
@@ -1337,7 +1331,7 @@ void Core::set_config(const std::map<std::string, std::string>& config, const st
}
}

InferenceEngine::Parameter Core::get_config(const std::string& deviceName, const std::string& name) const {
ie::Parameter Core::get_config(const std::string& deviceName, const std::string& name) const {
// HETERO case
{
if (deviceName.find("HETERO:") == 0) {
@@ -1363,13 +1357,13 @@ InferenceEngine::Parameter Core::get_config(const std::string& deviceName, const
auto parsed = core_detail::parseDeviceNameIntoConfig(deviceName);

// we need to return a copy of Parameter object which is created on Core side,
// not in InferenceEngine plugin side, which can be unloaded from Core in a parallel thread
// not in ie plugin side, which can be unloaded from Core in a parallel thread
// TODO: remove this WA after *-31417 is resolved
return core_detail::copyParameterValue(
_impl->GetCPPPluginByName(parsed._deviceName).GetConfig(name, parsed._config));
}

InferenceEngine::Parameter Core::get_metric(const std::string& deviceName, const std::string& name) const {
ie::Parameter Core::get_metric(const std::string& deviceName, const std::string& name) const {
return _impl->GetMetric(deviceName, name);
}

@@ -1382,7 +1376,7 @@ void Core::register_plugin(const std::string& pluginName, const std::string& dev
}

void Core::unload_plugin(const std::string& deviceName) {
InferenceEngine::DeviceIDParser parser(deviceName);
ie::DeviceIDParser parser(deviceName);
std::string devName = parser.getDeviceName();

_impl->UnloadPluginByName(devName);
@@ -1392,8 +1386,7 @@ void Core::register_plugins(const std::string& xmlConfigFile) {
_impl->RegisterPluginsInRegistry(xmlConfigFile);
}

InferenceEngine::RemoteContext::Ptr Core::create_context(const std::string& deviceName,
const InferenceEngine::ParamMap& params) {
ie::RemoteContext::Ptr Core::create_context(const std::string& deviceName, const ie::ParamMap& params) {
if (deviceName.find("HETERO") == 0) {
IE_THROW() << "HETERO device does not support remote context";
}
@@ -1408,7 +1401,7 @@ InferenceEngine::RemoteContext::Ptr Core::create_context(const std::string& devi
return _impl->GetCPPPluginByName(parsed._deviceName).CreateContext(parsed._config);
}

InferenceEngine::RemoteContext::Ptr Core::get_default_context(const std::string& deviceName) {
ie::RemoteContext::Ptr Core::get_default_context(const std::string& deviceName) {
if (deviceName.find("HETERO") == 0) {
IE_THROW() << "HETERO device does not support remote context";
}
@@ -1419,7 +1412,7 @@ InferenceEngine::RemoteContext::Ptr Core::get_default_context(const std::string&
IE_THROW() << "AUTO device does not support remote context";
}

auto parsed = core_detail::parseDeviceNameIntoConfig(deviceName, InferenceEngine::ParamMap());
auto parsed = core_detail::parseDeviceNameIntoConfig(deviceName, ie::ParamMap());

return _impl->GetCPPPluginByName(parsed._deviceName).GetDefaultContext(parsed._config);
}
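The ie_core.cpp changes are mechanical renames enabled by the new aliases. A small compile-time sketch of why they are behavior-preserving (header paths are assumptions):

#include <map>
#include <string>
#include <type_traits>

#include <ie_parameter.hpp>             // assumed header declaring InferenceEngine::Parameter
#include <openvino/runtime/common.hpp>  // assumed install path of the new header

// `namespace ie = InferenceEngine;` is only an alias, so both spellings name the same type,
// and ConfigMap is nothing more than std::map<std::string, std::string>.
static_assert(std::is_same<ov::ie::Parameter, InferenceEngine::Parameter>::value, "alias, not a new type");
static_assert(std::is_same<ov::runtime::ConfigMap, std::map<std::string, std::string>>::value, "plain string map");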