fix format and set property
Signed-off-by: Hu, Yuan2 <[email protected]>
tiger100256-hu committed Mar 16, 2022
1 parent 2b43de0 commit 1f1d288
Showing 5 changed files with 47 additions and 47 deletions.
18 changes: 9 additions & 9 deletions docs/snippets/MULTI0.cpp
@@ -3,16 +3,16 @@
int main() {
    using namespace InferenceEngine;
//! [part0]
    ov::Core core;
    std::shared_ptr<ov::Model> model = core.read_model("sample.xml");
    // With the new OpenVINO API, the "MULTI" plugin is (globally) pre-configured with the explicit option:
    core.set_property("MULTI", ov::device::priorities("HDDL,GPU"));
    ov::CompiledModel compileModel0 = core.compile_model(model, "MULTI");

    // With the new OpenVINO API, the "MULTI" configuration can also be part of the network configuration (and hence specific to the network):
    ov::CompiledModel compileModel1 = core.compile_model(model, "MULTI", ov::device::priorities("HDDL,GPU"));
    // Same as the previous, but the "MULTI" configuration is part of the device name (so the config is empty); also network-specific:
    ov::CompiledModel compileModel2 = core.compile_model(model, "MULTI:HDDL,GPU");
//! [part0]
    return 0;
}
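For context, a minimal sketch of how any of the compiled models above would typically be used for inference; this is illustrative only (not part of the diff), infer_example is a hypothetical helper name, and a single-input, single-output model is assumed:

#include <openvino/openvino.hpp>

int infer_example() {
    ov::Core core;
    std::shared_ptr<ov::Model> model = core.read_model("sample.xml");
    ov::CompiledModel compiled = core.compile_model(model, "MULTI:HDDL,GPU");
    // Create a request and run synchronous inference.
    ov::InferRequest request = compiled.create_infer_request();
    ov::Tensor input = request.get_input_tensor();   // assumes a single-input model
    // ... fill `input` with data ...
    request.infer();
    ov::Tensor output = request.get_output_tensor(); // assumes a single-output model
    return 0;
}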
28 changes: 14 additions & 14 deletions docs/snippets/MULTI1.cpp
@@ -3,20 +3,20 @@
int main() {
    using namespace InferenceEngine;
//! [part1]
    ov::Core core;
    std::shared_ptr<ov::Model> model = core.read_model("sample.xml");
    ov::CompiledModel compileModel = core.compile_model(model, "MULTI:HDDL,GPU");
    //...
    compileModel.set_property(ov::device::priorities("GPU,HDDL"));
    // you can even exclude some devices
    compileModel.set_property(ov::device::priorities("GPU"));
    //...
    // and then bring them back
    compileModel.set_property(ov::device::priorities("GPU,HDDL"));
    // but you cannot add new devices on the fly; the next line will trigger the following exception:
    // [ ERROR ] [NOT_FOUND] You can only change device priorities but not add new devices with the Network's SetConfig(MultiDeviceConfigParams::KEY_MULTI_DEVICE_PRIORITIES.
    // CPU device was not in the original device list!
    compileModel.set_property(ov::device::priorities("CPU,GPU,HDDL"));
//! [part1]
    return 0;
}
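As a side note (not shown in this commit), the effect of the runtime changes above can be verified by reading the property back from the compiled model; a hedged sketch using the ov::device::priorities property, with check_priorities as a hypothetical helper name:

#include <iostream>
#include <openvino/openvino.hpp>

int check_priorities() {
    ov::Core core;
    ov::CompiledModel compiled = core.compile_model(core.read_model("sample.xml"), "MULTI:HDDL,GPU");
    compiled.set_property(ov::device::priorities("GPU,HDDL"));
    // Read the property back to confirm that the runtime change took effect.
    std::string priorities = compiled.get_property(ov::device::priorities);
    std::cout << "MULTI device priorities: " << priorities << std::endl;
    return 0;
}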
18 changes: 9 additions & 9 deletions docs/snippets/MULTI2.cpp
@@ -3,15 +3,15 @@
int main() {
    using namespace InferenceEngine;
//! [part2]
    ov::Core core;
    std::shared_ptr<ov::Model> model = core.read_model("sample.xml");
    std::string allDevices = "MULTI:";
    std::vector<std::string> availableDevices = core.get_available_devices();
    for (auto && device : availableDevices) {
        allDevices += device;
        allDevices += ((device == availableDevices[availableDevices.size()-1]) ? "" : ",");
    }
    ov::CompiledModel compileModel = core.compile_model(model, allDevices);
//! [part2]
    return 0;
}
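A hedged alternative sketch of the same string construction (not part of the commit): the helper name build_device_string is hypothetical, and the only additions are an index-based join and a guard for an empty device list:

#include <openvino/openvino.hpp>
#include <stdexcept>
#include <string>
#include <vector>

int build_device_string() {
    ov::Core core;
    std::vector<std::string> devices = core.get_available_devices();
    if (devices.empty())
        throw std::runtime_error("no inference devices found");
    std::string allDevices = "MULTI:";
    for (size_t i = 0; i < devices.size(); ++i) {
        allDevices += devices[i];
        if (i + 1 < devices.size())
            allDevices += ",";
    }
    ov::CompiledModel compiled = core.compile_model(core.read_model("sample.xml"), allDevices);
    return 0;
}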
20 changes: 10 additions & 10 deletions docs/snippets/MULTI3.cpp
@@ -2,17 +2,17 @@

int main() {
//! [part3]
    ov::Core core;
    std::shared_ptr<ov::Model> model = core.read_model("sample.xml");
    std::string allDevices = "MULTI:";
    std::vector<std::string> myriadDevices = core.get_property("MYRIAD", ov::available_devices);
    for (size_t i = 0; i < myriadDevices.size(); ++i) {
        allDevices += std::string("MYRIAD.")
                    + myriadDevices[i]
                    + std::string(i < (myriadDevices.size() - 1) ? "," : "");
    }

    ov::CompiledModel compileModel = core.compile_model(model, allDevices);
//! [part3]
    return 0;
}
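For reference (not part of this commit), the same per-device enumeration idea can be used to print what each available plugin reports about itself; a minimal sketch using the standard ov::device::full_name read-only property, with list_devices as a hypothetical helper name:

#include <iostream>
#include <openvino/openvino.hpp>

int list_devices() {
    ov::Core core;
    for (const std::string& device : core.get_available_devices()) {
        // FULL_DEVICE_NAME is a standard read-only property exposed by the plugins.
        std::string full = core.get_property(device, ov::device::full_name);
        std::cout << device << ": " << full << std::endl;
    }
    return 0;
}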
10 changes: 5 additions & 5 deletions docs/snippets/MULTI4.cpp
@@ -1,15 +1,15 @@
#include <openvino/openvino.hpp>

int main() {
-    ov::AnyMap hddl_config = {{ov::enable_profiling.name(), ov::Any(true)}};
-    ov::AnyMap gpu_config = {{ov::enable_profiling.name(), ov::Any(true)}};
+    ov::AnyMap hddl_config = {{ov::enable_profiling(true)}};
+    ov::AnyMap gpu_config = {{ov::enable_profiling(true)}};
//! [part4]
    // configure the HDDL device first
    ov::Core core;
    std::shared_ptr<ov::Model> model = core.read_model("sample.xml");
-    core.set_property("HDDL", hddl_config);
-    // configure the GPU device
-    core.set_property("GPU", gpu_config);
+    core.set_property({ov::device::properties("HDDL", hddl_config),
+                       ov::device::properties("GPU", gpu_config)});

    // load the network to the multi-device, while specifying the configuration (devices along with priorities):
    ov::CompiledModel compileModel = core.compile_model(model, "MULTI", ov::device::priorities("HDDL,GPU"));
    // the new property allows querying the optimal number of requests:
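The diff is truncated here; the trailing comment refers to querying the optimal number of requests. Purely as an illustration of such a query (not the elided diff content), assuming the ov::optimal_number_of_infer_requests property and a hypothetical helper name query_optimal_requests:

#include <cstdint>
#include <openvino/openvino.hpp>
#include <vector>

int query_optimal_requests() {
    ov::Core core;
    std::shared_ptr<ov::Model> model = core.read_model("sample.xml");
    ov::CompiledModel compiled = core.compile_model(model, "MULTI", ov::device::priorities("HDDL,GPU"));
    // Ask the compiled model how many in-flight requests keep all devices busy,
    // then create that many requests for a simple pipeline.
    uint32_t nireq = compiled.get_property(ov::optimal_number_of_infer_requests);
    std::vector<ov::InferRequest> requests;
    for (uint32_t i = 0; i < nireq; ++i) {
        requests.push_back(compiled.create_infer_request());
    }
    return 0;
}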
