LoadNetwork interface: executable network is returned as a return value (openvinotoolkit#1840)

* LoadNetwork interface

* Added reference to SOPointer to returned ExecutableNetwork

* Fixed test
ilya-lavrenov authored and mryzhov committed Aug 26, 2020
1 parent dc6cd3d commit 4109d86
Showing 9 changed files with 52 additions and 54 deletions.
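In short: plugins used to fill a shared-pointer output parameter; they now return the ExecutableNetwork directly, which also lets failures surface as exceptions rather than status codes. A minimal before/after sketch of the calling convention, using simplified stand-in types rather than the real Inference Engine headers:

#include <map>
#include <memory>
#include <string>

// Illustrative stand-ins only, not the real Inference Engine types.
struct ICNNNetwork {};
struct ExecutableNetwork {};

// Old convention: the result comes back through an output parameter.
void LoadNetworkOld(std::shared_ptr<ExecutableNetwork>& result,
                    const ICNNNetwork& /*network*/,
                    const std::map<std::string, std::string>& /*config*/) {
    result = std::make_shared<ExecutableNetwork>();
}

// New convention: the executable network is the return value, so a load
// is a single expression and errors can be thrown instead of encoded.
ExecutableNetwork LoadNetworkNew(const ICNNNetwork& /*network*/,
                                 const std::map<std::string, std::string>& /*config*/) {
    return ExecutableNetwork{};
}

int main() {
    ICNNNetwork network;

    std::shared_ptr<ExecutableNetwork> viaOutParam;
    LoadNetworkOld(viaOutParam, network, {});                    // two steps, mutable state
    ExecutableNetwork viaReturn = LoadNetworkNew(network, {});   // one expression
    (void)viaReturn;
}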
7 changes: 4 additions & 3 deletions inference-engine/src/gna_plugin/gna_plugin.hpp
@@ -103,9 +103,10 @@ class GNAPlugin : public InferenceEngine::IInferencePlugin {
void AddExtension(InferenceEngine::IExtensionPtr extension) override;

void SetConfig(const std::map<std::string, std::string> &config) override;
void LoadNetwork(InferenceEngine::IExecutableNetwork::Ptr &executableNetwork,
const InferenceEngine::ICNNNetwork &network,
const std::map<std::string, std::string> &config_map) override { THROW_GNA_EXCEPTION << "Not implemented"; }
InferenceEngine::ExecutableNetwork LoadNetwork(const InferenceEngine::ICNNNetwork &network,
const std::map<std::string, std::string> &config_map) override {
THROW_GNA_EXCEPTION << "Not implemented";
}
InferenceEngine::ExecutableNetwork LoadNetwork(const InferenceEngine::ICNNNetwork &network,
const std::map<std::string, std::string> &config_map,
InferenceEngine::RemoteContext::Ptr context) override { THROW_GNA_EXCEPTION << "Not implemented"; }
10 changes: 4 additions & 6 deletions inference-engine/src/inference_engine/ie_plugin_cpp.hpp
@@ -76,9 +76,7 @@ class InferencePlugin {
}

ExecutableNetwork LoadNetwork(CNNNetwork network, const std::map<std::string, std::string>& config) {
IExecutableNetwork::Ptr ret;
CALL_STATEMENT(actual->LoadNetwork(ret, network, config));
return ExecutableNetwork(ret, actual);
CALL_STATEMENT(return ExecutableNetwork(actual->LoadNetwork(network, config), actual));
}

void AddExtension(InferenceEngine::IExtensionPtr extension) {
@@ -102,7 +100,7 @@

ExecutableNetwork ImportNetwork(std::istream& networkModel,
const std::map<std::string, std::string> &config) {
CALL_STATEMENT(return actual->ImportNetwork(networkModel, config));
CALL_STATEMENT(return ExecutableNetwork(actual->ImportNetwork(networkModel, config), actual));
}

Parameter GetMetric(const std::string& name, const std::map<std::string, Parameter>& options) const {
@@ -111,7 +109,7 @@

ExecutableNetwork LoadNetwork(const ICNNNetwork& network, const std::map<std::string, std::string>& config,
RemoteContext::Ptr context) {
CALL_STATEMENT(return actual->LoadNetwork(network, config, context));
CALL_STATEMENT(return ExecutableNetwork(actual->LoadNetwork(network, config, context), actual));
}

RemoteContext::Ptr CreateContext(const ParamMap& params) {
@@ -125,7 +123,7 @@
ExecutableNetwork ImportNetwork(std::istream& networkModel,
const RemoteContext::Ptr& context,
const std::map<std::string, std::string>& config) {
CALL_STATEMENT(return actual->ImportNetwork(networkModel, context, config));
CALL_STATEMENT(return ExecutableNetwork(actual->ImportNetwork(networkModel, context, config), actual));
}

Parameter GetConfig(const std::string& name, const std::map<std::string, Parameter>& options) const {
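Each wrapper above now builds the C++-side ExecutableNetwork from two pieces: the object the plugin returned and actual, the SOPointer to the loaded plugin library. This is what the second commit-message bullet is about: the returned network must co-own the shared object that created it, or the library could be unloaded while the network's virtual tables still point into it. A rough sketch of the lifetime idea, with a plain shared_ptr standing in for the real SOPointer:

#include <memory>
#include <utility>

// Stand-ins: PluginLibrary models the loaded plugin shared object
// (the real code holds an SOPointer); the impl models what the plugin returns.
struct PluginLibrary {};
struct IExecutableNetworkImpl {};

// The wrapper co-owns the library handle, so the shared object cannot be
// unloaded while any network created from it is still alive.
class ExecutableNetworkWrapper {
public:
    ExecutableNetworkWrapper(std::shared_ptr<IExecutableNetworkImpl> impl,
                             std::shared_ptr<PluginLibrary> plugin)
        : impl_(std::move(impl)), plugin_(std::move(plugin)) {}

private:
    std::shared_ptr<IExecutableNetworkImpl> impl_;
    std::shared_ptr<PluginLibrary> plugin_;  // keeps the library loaded
};

int main() {
    auto plugin = std::make_shared<PluginLibrary>();
    ExecutableNetworkWrapper network(std::make_shared<IExecutableNetworkImpl>(), plugin);
    plugin.reset();  // the wrapper's own copy still keeps the library alive
}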
@@ -56,16 +56,14 @@ class InferencePluginInternal : public IInferencePlugin {
~InferencePluginInternal() override = default;

public:
void LoadNetwork(IExecutableNetwork::Ptr& executableNetwork, const ICNNNetwork& network,
const std::map<std::string, std::string>& config) override {
LoadNetworkImplPrivate(executableNetwork, network, config);
ExecutableNetwork LoadNetwork(const ICNNNetwork& network,
const std::map<std::string, std::string>& config) override {
return LoadNetworkImplPrivate(network, config);
}

ExecutableNetwork LoadNetwork(const ICNNNetwork& network, const std::map<std::string, std::string>& config,
RemoteContext::Ptr context) override {
IExecutableNetwork::Ptr executableNetworkPtr;
LoadNetworkImplPrivate(executableNetworkPtr, network, config, context);
return ExecutableNetwork(executableNetworkPtr);
return LoadNetworkImplPrivate(network, config, context);
}

IExecutableNetwork::Ptr ImportNetwork(const std::string& modelFileName,
@@ -141,14 +139,14 @@
/**
* @brief A helper method which clones an ICNNNetwork object, keeps InputsDataMap and OutputsDataMap data maps,
* and creates an IExecutableNetwork object
* @param executableNetwork An output executable network object
* @param network An input ICNNNetwork object used to create an executable network object
* @param config A map of string -> string configuration options.
* @param context An optional pointer to RemoteContext
* @return An output executable network object
*/
void LoadNetworkImplPrivate(IExecutableNetwork::Ptr& executableNetwork, const ICNNNetwork& network,
const std::map<std::string, std::string>& config,
RemoteContext::Ptr context = nullptr) {
ExecutableNetwork LoadNetworkImplPrivate(const ICNNNetwork& network,
const std::map<std::string, std::string>& config,
RemoteContext::Ptr context = nullptr) {
InputsDataMap networkInputs, networkInputsCloned;
OutputsDataMap networkOutputs, networkOutputsCloned;
network.getInputsInfo(networkInputs);
@@ -166,17 +164,16 @@
impl->setNetworkOutputs(networkOutputsCloned);
impl->SetPointerToPlugin(shared_from_this());

executableNetwork.reset(new ExecutableNetworkBase<ExecutableNetworkInternal>(impl), [](details::IRelease* p) {
p->Release();
});
auto executableNetwork = make_executable_network(impl);
return ExecutableNetwork(executableNetwork);
}

protected:
/**
* @brief Creates an executable network from a parsed network object, users can create as many networks as they need
* and use them simultaneously (up to the limitation of the HW resources)
* @note The function is used in
* InferencePluginInternal::LoadNetwork(IExecutableNetwork::Ptr&, const ICNNNetwork&, const std::map<std::string, std::string>&)
* InferencePluginInternal::LoadNetwork(const ICNNNetwork&, const std::map<std::string, std::string>&)
* which performs common steps first and calls this plugin-dependent method implementation after.
* @param network A network object
* @param config string-string map of config parameters relevant only for this load operation
@@ -196,7 +193,8 @@
* @param config string-string map of config parameters relevant only for this load operation
* @return Shared pointer to the ExecutableNetwork object
*/
virtual ExecutableNetworkInternal::Ptr LoadExeNetworkImpl(const ICNNNetwork& network, RemoteContext::Ptr context,
virtual ExecutableNetworkInternal::Ptr LoadExeNetworkImpl(const ICNNNetwork& network,
RemoteContext::Ptr context,
const std::map<std::string, std::string>& config) {
(void)network;
(void)context;
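The doc comments in this file describe a template-method split: the private helper does the work common to every plugin (clone the network, call the plugin-specific LoadExeNetworkImpl, attach the cloned input/output maps, wrap the result), and a concrete plugin overrides only LoadExeNetworkImpl. A condensed sketch of that shape, with toy types in place of the real ones:

#include <map>
#include <memory>
#include <string>

// Toy stand-ins for the real Inference Engine types.
struct ICNNNetwork {};
struct ExecutableNetworkInternal { using Ptr = std::shared_ptr<ExecutableNetworkInternal>; };
struct ExecutableNetwork { explicit ExecutableNetwork(ExecutableNetworkInternal::Ptr) {} };

class PluginBase {
public:
    // Common entry point: the shared steps run here once for every plugin.
    ExecutableNetwork LoadNetwork(const ICNNNetwork& network,
                                  const std::map<std::string, std::string>& config) {
        // ...clone the network and capture input/output info (omitted)...
        auto impl = LoadExeNetworkImpl(network, config);  // plugin-specific part
        // ...attach cloned I/O maps and the plugin pointer to impl (omitted)...
        return ExecutableNetwork(impl);
    }
    virtual ~PluginBase() = default;

protected:
    // The only hook a concrete plugin needs to override.
    virtual ExecutableNetworkInternal::Ptr
    LoadExeNetworkImpl(const ICNNNetwork&, const std::map<std::string, std::string>&) = 0;
};

class ToyPlugin : public PluginBase {
protected:
    ExecutableNetworkInternal::Ptr
    LoadExeNetworkImpl(const ICNNNetwork&, const std::map<std::string, std::string>&) override {
        return std::make_shared<ExecutableNetworkInternal>();  // compile the graph here
    }
};

int main() {
    ToyPlugin plugin;
    ExecutableNetwork exe = plugin.LoadNetwork(ICNNNetwork{}, {});
    (void)exe;
}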
@@ -163,12 +163,12 @@ class IInferencePlugin : public details::IRelease,
/**
* @brief Creates an executable network from a parsed network object, users can create as many networks as they need
* and use them simultaneously (up to the limitation of the HW resources)
* @param executableNetwork - a reference to a shared ptr of the returned network interface
* @param network - a network object acquired from InferenceEngine::Core::ReadNetwork
* @param config string-string map of config parameters relevant only for this load operation
* @param network A network object acquired from InferenceEngine::Core::ReadNetwork
* @param config A string-string map of config parameters relevant only for this load operation
* @return Created Executable Network object
*/
virtual void LoadNetwork(IExecutableNetwork::Ptr& executableNetwork, const ICNNNetwork& network,
const std::map<std::string, std::string>& config) = 0;
virtual ExecutableNetwork LoadNetwork(const ICNNNetwork& network,
const std::map<std::string, std::string>& config) = 0;

/**
* @brief Creates an executable network from network object, on specified remote context
@@ -178,7 +178,8 @@
* execute the network
* @return Created Executable Network object
*/
virtual ExecutableNetwork LoadNetwork(const ICNNNetwork& network, const std::map<std::string, std::string>& config,
virtual ExecutableNetwork LoadNetwork(const ICNNNetwork& network,
const std::map<std::string, std::string>& config,
RemoteContext::Ptr context) = 0;
/**
* @brief Registers extension within plugin
@@ -22,10 +22,11 @@ void MockPlugin::SetConfig(const std::map<std::string, std::string>& config) {
this->config = config;
}

void MockPlugin::LoadNetwork(IExecutableNetwork::Ptr &ret, const ICNNNetwork &network,
const std::map<std::string, std::string> &config) {
ExecutableNetwork
MockPlugin::LoadNetwork(const ICNNNetwork &network,
const std::map<std::string, std::string> &config) {
if (_target) {
_target->LoadNetwork(ret, network, config);
return _target->LoadNetwork(network, config);
} else {
THROW_IE_EXCEPTION << NOT_IMPLEMENTED_str;
}
@@ -17,8 +17,9 @@ class MockPlugin : public InferenceEngine::InferencePluginInternal {
explicit MockPlugin(InferenceEngine::IInferencePlugin* target);

void SetConfig(const std::map<std::string, std::string>& config) override;
void LoadNetwork(InferenceEngine::IExecutableNetwork::Ptr &ret, const InferenceEngine::ICNNNetwork &network,
const std::map<std::string, std::string> &config) override;
InferenceEngine::ExecutableNetwork
LoadNetwork(const InferenceEngine::ICNNNetwork &network,
const std::map<std::string, std::string> &config) override;
ExecutableNetworkInternal::Ptr
LoadExeNetworkImpl(const InferenceEngine::ICNNNetwork& network,
const std::map<std::string, std::string>& config) override;
@@ -48,7 +48,7 @@ class InferenceEnginePluginInternalTest : public ::testing::Test {
}

void getInferRequestWithMockImplInside(IInferRequest::Ptr &request) {
IExecutableNetwork::Ptr exeNetwork;
ExecutableNetwork exeNetwork;
InputsDataMap inputsInfo;
mockNotEmptyNet.getInputsInfo(inputsInfo);
OutputsDataMap outputsInfo;
@@ -57,10 +57,8 @@
mockExeNetworkTS = make_shared<MockExecutableNetworkThreadSafe>();
EXPECT_CALL(*mock_plugin_impl.get(), LoadExeNetworkImpl(_, _)).WillOnce(Return(mockExeNetworkTS));
EXPECT_CALL(*mockExeNetworkTS.get(), CreateInferRequestImpl(_, _)).WillOnce(Return(mockInferRequestInternal));
plugin->LoadNetwork(exeNetwork, mockNotEmptyNet, {});
ASSERT_NE(exeNetwork, nullptr) << dsc.msg;
sts = exeNetwork->CreateInferRequest(request, &dsc);
ASSERT_EQ((int) StatusCode::OK, sts) << dsc.msg;
ASSERT_NO_THROW(exeNetwork = plugin->LoadNetwork(mockNotEmptyNet, {}));
ASSERT_NO_THROW(request = exeNetwork.CreateInferRequest());
}
};
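The test rewrite above shows the caller-side effect of the new interface: instead of checking a StatusCode and a ResponseDesc message, the test asserts that the object-returning calls do not throw. A toy comparison of the two styles (invented helper names, not the real API):

#include <gtest/gtest.h>
#include <memory>

struct InferRequest {};
enum class StatusCode { OK, GENERAL_ERROR };

// Old style: result via out-parameter, failure via status code.
StatusCode CreateInferRequestOld(std::shared_ptr<InferRequest>& request) {
    request = std::make_shared<InferRequest>();
    return StatusCode::OK;
}

// New style: result via return value, failure via exception.
InferRequest CreateInferRequestNew() {
    return InferRequest{};
}

TEST(ApiStyle, StatusCodeVsException) {
    // Old: every call needs a status check plus a null check.
    std::shared_ptr<InferRequest> request;
    ASSERT_EQ(StatusCode::OK, CreateInferRequestOld(request));
    ASSERT_NE(request, nullptr);

    // New: a single ASSERT_NO_THROW covers the whole call.
    InferRequest directRequest;
    ASSERT_NO_THROW(directRequest = CreateInferRequestNew());
}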

@@ -258,17 +258,17 @@ TEST_F(MKLDNNGraphLeaksTests, MKLDNN_not_release_outputs_fp32) {
ASSERT_NE(1, network.getOutputsInfo().size());

std::shared_ptr<MKLDNNTestEngine> score_engine(new MKLDNNTestEngine());
InferenceEngine::IExecutableNetwork::Ptr exeNetwork1;
ASSERT_NO_THROW(score_engine->LoadNetwork(exeNetwork1, network, {}));
InferenceEngine::ExecutableNetwork exeNetwork1;
ASSERT_NO_THROW(exeNetwork1 = score_engine->LoadNetwork(network, {}));

size_t modified_outputs_size = score_engine->getGraph(exeNetwork1).GetOutputNodes().size();

InferenceEngine::CNNNetwork network2;
ASSERT_NO_THROW(network2 = core.ReadNetwork(model, weights_ptr));
ASSERT_EQ(1, network2.getOutputsInfo().size());

InferenceEngine::IExecutableNetwork::Ptr exeNetwork2;
ASSERT_NO_THROW(score_engine->LoadNetwork(exeNetwork2, network2, {}));
InferenceEngine::ExecutableNetwork exeNetwork2;
ASSERT_NO_THROW(exeNetwork2 = score_engine->LoadNetwork(network2, {}));

size_t original_outputs_size = score_engine->getGraph(exeNetwork2).GetOutputNodes().size();

@@ -85,8 +85,8 @@ class ConvSum: public TestsCommon, public ::testing::WithParamInterface<conv_elt
InferenceEngine::Core ie;
auto network = ie.ReadNetwork(model, getConvWeightsBlob(p.in1, p.conv));
std::shared_ptr<MKLDNNPlugin::Engine> score_engine(new MKLDNNPlugin::Engine());
InferenceEngine::IExecutableNetwork::Ptr exeNetwork1;
ASSERT_NO_THROW(score_engine->LoadNetwork(exeNetwork1, network, {}));
InferenceEngine::ExecutableNetwork exeNetwork1;
ASSERT_NO_THROW(exeNetwork1 = score_engine->LoadNetwork(network, {}));

auto conv = CommonTestUtils::getLayerByName(network, "Convolution2");
auto eltwise = CommonTestUtils::getLayerByName(network, "Eltwise3");
@@ -145,8 +145,8 @@ class ConvSumReLU: public TestsCommon, public ::testing::WithParamInterface<conv
auto network = ie.ReadNetwork(model, getConvWeightsBlob(p.in1, p.conv));

std::shared_ptr<MKLDNNPlugin::Engine> score_engine(new MKLDNNPlugin::Engine());
InferenceEngine::IExecutableNetwork::Ptr exeNetwork1;
ASSERT_NO_THROW(score_engine->LoadNetwork(exeNetwork1, network, { }));
InferenceEngine::ExecutableNetwork exeNetwork1;
ASSERT_NO_THROW(exeNetwork1 = score_engine->LoadNetwork(network, { }));

auto conv = CommonTestUtils::getLayerByName(network, "Convolution2");
auto eltwise = CommonTestUtils::getLayerByName(network, "Eltwise3");
@@ -206,8 +206,8 @@ class ConvConvSum: public TestsCommon, public ::testing::WithParamInterface<conv
auto network = ie.ReadNetwork(model, getConvWeightsBlob(p.in1, p.conv));

std::shared_ptr<MKLDNNPlugin::Engine> score_engine(new MKLDNNPlugin::Engine());
InferenceEngine::IExecutableNetwork::Ptr exeNetwork1;
ASSERT_NO_THROW(score_engine->LoadNetwork(exeNetwork1, network, { }));
InferenceEngine::ExecutableNetwork exeNetwork1;
ASSERT_NO_THROW(exeNetwork1 = score_engine->LoadNetwork(network, { }));

auto conv2 = CommonTestUtils::getLayerByName(network, "Convolution2");
auto conv3 = CommonTestUtils::getLayerByName(network, "Convolution3");
@@ -265,8 +265,8 @@ class ConvConvSumReLU: public TestsCommon, public ::testing::WithParamInterface<
auto network = ie.ReadNetwork(model, CommonTestUtils::getWeightsBlob(weight_size));

std::shared_ptr<MKLDNNPlugin::Engine> score_engine(new MKLDNNPlugin::Engine());
InferenceEngine::IExecutableNetwork::Ptr exeNetwork1;
ASSERT_NO_THROW(score_engine->LoadNetwork(exeNetwork1, network, { }));
InferenceEngine::ExecutableNetwork exeNetwork1;
ASSERT_NO_THROW(exeNetwork1 = score_engine->LoadNetwork(network, { }));

auto conv2 = CommonTestUtils::getLayerByName(network, "Convolution2");
auto conv3 = CommonTestUtils::getLayerByName(network, "Convolution3");
@@ -341,8 +341,8 @@ class ConvConvSumReLUPoolConv: public TestsCommon, public ::testing::WithParamIn
auto network = ie.ReadNetwork(model, CommonTestUtils::getWeightsBlob(weight_size));

std::shared_ptr<MKLDNNPlugin::Engine> score_engine(new MKLDNNPlugin::Engine());
InferenceEngine::IExecutableNetwork::Ptr exeNetwork1;
ASSERT_NO_THROW(score_engine->LoadNetwork(exeNetwork1, network, {}));
InferenceEngine::ExecutableNetwork exeNetwork1;
ASSERT_NO_THROW(exeNetwork1 = score_engine->LoadNetwork(network, {}));

auto conv2 = CommonTestUtils::getLayerByName(network, "Convolution2");
auto conv3 = CommonTestUtils::getLayerByName(network, "Convolution3");