Skip to content

Commit

Permalink
[JS OV] Add Core.importModel() (#25258)
Browse files Browse the repository at this point in the history
### Details:
 - Add natively asynchronous Core.importModel()
 - Add definition to addon.ts

### Tickets:
 - *136462*

---------

Co-authored-by: Vishniakov Nikolai <[email protected]>
  • Loading branch information
almilosz and vishniakov-nikolai authored Jul 8, 2024
1 parent 64f22d9 commit a3a2b8b
Show file tree
Hide file tree
Showing 6 changed files with 152 additions and 8 deletions.
18 changes: 18 additions & 0 deletions src/bindings/js/node/include/core_wrap.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -91,6 +91,9 @@ class CoreWrap : public Napi::ObjectWrap<CoreWrap> {
/** @brief Imports a compiled model from the previously exported one. */
Napi::Value import_model(const Napi::CallbackInfo& info);

/** @brief Implements Core.importModel() defined in ../lib/addon.ts. */
Napi::Value import_model_async(const Napi::CallbackInfo& info);

/** @brief Returns devices available for inference. */
Napi::Value get_available_devices(const Napi::CallbackInfo& info);

Expand All @@ -99,6 +102,7 @@ class CoreWrap : public Napi::ObjectWrap<CoreWrap> {

private:
ov::Core _core;
std::mutex _mutex;
};

struct TsfnContextModel {
Expand Down Expand Up @@ -127,6 +131,20 @@ struct TsfnContextPath {
std::map<std::string, ov::Any> _config = {};
};

/** @brief State shared between the JS main thread and the importModel() worker thread. */
struct ImportModelContext {
    ImportModelContext(Napi::Env env, ov::Core& core) : deferred(Napi::Promise::Deferred::New(env)), _core{core} {};
    // Worker thread that runs ov::Core::import_model(); joined in the tsfn finalizer.
    std::thread nativeThread;

    // Promise returned to JS; settled from the tsfn callback on the main thread.
    Napi::Promise::Deferred deferred;
    // Keeps the Node event loop alive until the worker releases it.
    Napi::ThreadSafeFunction tsfn;

    // Inputs captured on the main thread before the worker starts:
    std::stringstream _stream;                 // raw bytes of the exported model buffer
    std::string _device;                       // target device name
    std::map<std::string, ov::Any> _config = {};  // optional compile properties
    ov::Core& _core;                           // reference to the owning CoreWrap's ov::Core
    ov::CompiledModel _compiled_model;         // result produced by the worker thread
};

void FinalizerCallbackModel(Napi::Env env, void* finalizeData, TsfnContextModel* context);
void FinalizerCallbackPath(Napi::Env env, void* finalizeData, TsfnContextPath* context);
void compileModelThreadModel(TsfnContextModel* context);
Expand Down
2 changes: 2 additions & 0 deletions src/bindings/js/node/include/helper.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -108,6 +108,8 @@ Napi::Array cpp_to_js<ov::Dimension, Napi::Array>(const Napi::CallbackInfo& info
template <>
Napi::Boolean cpp_to_js<bool, Napi::Boolean>(const Napi::CallbackInfo& info, const bool value);

Napi::Object cpp_to_js(const Napi::Env& env, const ov::CompiledModel& compiled_model);

/** @brief Takes Napi::Value and parse Napi::Array or Napi::Object to ov::TensorVector. */
ov::TensorVector parse_input_data(const Napi::Value& input);

Expand Down
11 changes: 10 additions & 1 deletion src/bindings/js/node/lib/addon.ts
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,7 @@ interface Core {
},
};
/**
* It imports a previously exported compiled model.
* Asynchronously imports a previously exported compiled model.
* @param modelStream The input stream that contains a model,
* previously exported with the {@link CompiledModel.exportModelSync} method.
* @param device The name of a device, for which you import a compiled model.
Expand All @@ -132,6 +132,15 @@ interface Core {
* @param config An object with the key-value pairs
* (property name, property value): relevant only for this load operation.
*/
importModel(
modelStream: Buffer,
device: string,
config?: { [key: string]: string | number | boolean }
): Promise<CompiledModel>;
/**
* A synchronous version of {@link Core.importModel}.
* It imports a previously exported compiled model.
*/
importModelSync(
modelStream: Buffer,
device: string,
Expand Down
61 changes: 61 additions & 0 deletions src/bindings/js/node/src/core_wrap.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -51,6 +51,7 @@ Napi::Function CoreWrap::get_class(Napi::Env env) {
InstanceMethod("compileModelSync", &CoreWrap::compile_model_sync_dispatch),
InstanceMethod("compileModel", &CoreWrap::compile_model_async),
InstanceMethod("getAvailableDevices", &CoreWrap::get_available_devices),
InstanceMethod("importModel", &CoreWrap::import_model_async),
InstanceMethod("importModelSync", &CoreWrap::import_model),
InstanceMethod("getAvailableDevices", &CoreWrap::get_available_devices),
InstanceMethod("getVersions", &CoreWrap::get_versions),
Expand Down Expand Up @@ -350,6 +351,66 @@ Napi::Value CoreWrap::import_model(const Napi::CallbackInfo& info) {
}
}

/** @brief ThreadSafeFunction finalizer: joins the worker thread and frees the
 * shared context. Runs on the main thread after the tsfn is released.
 */
void ImportModelFinalizer(Napi::Env env, void* finalizeData, ImportModelContext* context) {
    // Join only if the worker thread was actually started — calling join() on a
    // non-joinable thread (e.g. std::thread construction failed before launch)
    // would invoke std::terminate.
    if (context->nativeThread.joinable()) {
        context->nativeThread.join();
    }
    delete context;
}

/** @brief Worker-thread body: imports the model and settles the JS promise.
 * Any exception from ov::Core::import_model() is forwarded to JS as a promise
 * rejection; previously it escaped the thread entry point, which would call
 * std::terminate and leave the promise pending forever.
 */
void importModelThread(ImportModelContext* context, std::mutex& mutex) {
    try {
        // Imports model without blocking the main thread; access to ov::Core is
        // serialized with the CoreWrap-level mutex.
        {
            const std::lock_guard<std::mutex> lock(mutex);
            context->_compiled_model = context->_core.import_model(context->_stream, context->_device, context->_config);
        }

        // Resolve the promise on the main thread with the wrapped CompiledModel.
        context->tsfn.BlockingCall(context, [](Napi::Env env, Napi::Function, ImportModelContext* ctx) {
            ctx->deferred.Resolve(cpp_to_js(env, ctx->_compiled_model));
        });
    } catch (const std::exception& e) {
        // Copy the message into the closure — `e` dies with this frame, but the
        // callback runs later on the main thread.
        const std::string error_message = e.what();
        context->tsfn.BlockingCall([context, error_message](Napi::Env env, Napi::Function) {
            context->deferred.Reject(Napi::Error::New(env, error_message).Value());
        });
    }
    // Let the finalizer run: it joins this thread and deletes the context.
    context->tsfn.Release();
}

/** @brief Implements the asynchronous Core.importModel() defined in addon.ts.
 * Accepts (Buffer, string) or (Buffer, string, object) and returns a Promise
 * that is settled by a worker thread running ov::Core::import_model().
 */
Napi::Value CoreWrap::import_model_async(const Napi::CallbackInfo& info) {
    const auto& env = info.Env();
    std::vector<std::string> allowed_signatures;

    try {
        if (ov::js::validate<Napi::Buffer<uint8_t>, Napi::String>(info, allowed_signatures) ||
            ov::js::validate<Napi::Buffer<uint8_t>, Napi::String, Napi::Object>(info, allowed_signatures)) {
            // Own the context until it is handed over to the tsfn finalizer, so
            // an exception below (e.g. from to_anyMap) cannot leak it — with a
            // bare `new` the catch block would report the error but never free it.
            auto context_guard = std::make_unique<ImportModelContext>(env, _core);

            // Copy the model bytes straight into the stream, without the
            // intermediate std::string the previous version built.
            const auto& model_data = info[0].As<Napi::Buffer<uint8_t>>();
            context_guard->_stream.write(reinterpret_cast<const char*>(model_data.Data()), model_data.Length());
            context_guard->_device = info[1].ToString();
            context_guard->_config = info.Length() == 3 ? to_anyMap(env, info[2]) : ov::AnyMap();

            // From here on the finalizer owns the context: it joins the worker
            // thread and deletes the context once the tsfn is released.
            auto context_data = context_guard.release();
            context_data->tsfn = Napi::ThreadSafeFunction::New(env,
                                                               Napi::Function(),
                                                               "TSFN",
                                                               0,
                                                               1,
                                                               context_data,
                                                               ImportModelFinalizer,
                                                               (void*)nullptr);

            context_data->nativeThread = std::thread(importModelThread, context_data, std::ref(_mutex));
            // Returns a Promise to JS. Method import_model() is performed on additional thread.
            return context_data->deferred.Promise();
        } else {
            OPENVINO_THROW("'importModel'", ov::js::get_parameters_error_msg(info, allowed_signatures));
        }

    } catch (std::exception& e) {
        reportError(info.Env(), e.what());
        return info.Env().Undefined();
    }
}

Napi::Value CoreWrap::set_property(const Napi::CallbackInfo& info) {
try {
auto args = try_get_set_property_parameters(info);
Expand Down
12 changes: 12 additions & 0 deletions src/bindings/js/node/src/helper.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@

#include "node/include/helper.hpp"

#include "node/include/compiled_model.hpp"
#include "node/include/tensor.hpp"
#include "node/include/type_validation.hpp"

Expand Down Expand Up @@ -256,6 +257,17 @@ Napi::Boolean cpp_to_js<bool, Napi::Boolean>(const Napi::CallbackInfo& info, con
return Napi::Boolean::New(info.Env(), value);
}

/** @brief Wraps an ov::CompiledModel into a JS CompiledModel object
 * created from the prototype stored in the addon's instance data.
 */
Napi::Object cpp_to_js(const Napi::Env& env, const ov::CompiledModel& compiled_model) {
    const auto& prototype = env.GetInstanceData<AddonData>()->compiled_model;
    if (!prototype) {
        OPENVINO_THROW("Invalid pointer to CompiledModel prototype.");
    }
    // Instantiate an empty JS object, then attach the native compiled model to it.
    auto js_compiled_model = prototype.New({});
    Napi::ObjectWrap<CompiledModelWrap>::Unwrap(js_compiled_model)->set_compiled_model(compiled_model);
    return js_compiled_model;
}

ov::TensorVector parse_input_data(const Napi::Value& input) {
ov::TensorVector parsed_input;
if (input.IsArray()) {
Expand Down
56 changes: 49 additions & 7 deletions src/bindings/js/node/tests/basic.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -236,55 +236,97 @@ describe('Test exportModel()/importModel()', () => {
const inferRequest = compiledModel.createInferRequest();
const res1 = inferRequest.infer([tensor]);

it('Test importModel(stream, device)', () => {
it('Test importModelSync(stream, device)', () => {
const newCompiled = core.importModelSync(userStream, 'CPU');
const newInferRequest = newCompiled.createInferRequest();
const res2 = newInferRequest.infer([tensor]);

assert.deepStrictEqual(res1['fc_out'].data[0], res2['fc_out'].data[0]);
});

it('Test importModel(stream, device, config)', () => {
it('Test importModelSync(stream, device, config)', () => {
const newCompiled = core.importModelSync(userStream, 'CPU', { 'NUM_STREAMS': 1 });
const newInferRequest = newCompiled.createInferRequest();
const res2 = newInferRequest.infer([tensor]);

assert.deepStrictEqual(res1['fc_out'].data[0], res2['fc_out'].data[0]);
});

it('Test importModel(stream, device) throws', () => {
it('Test importModelSync(stream, device) throws', () => {
assert.throws(
() => core.importModelSync(epsilon, 'CPU'),
/The first argument must be of type Buffer./
);
});

it('Test importModel(stream, device) throws', () => {
it('Test importModelSync(stream, device) throws', () => {
assert.throws(
() => core.importModelSync(userStream, tensor),
/The second argument must be of type String./
);
});
it('Test importModel(stream, device, config: tensor) throws', () => {
it('Test importModelSync(stream, device, config: tensor) throws', () => {
assert.throws(
() => core.importModelSync(userStream, 'CPU', tensor),
/NotFound: Unsupported property 0 by CPU plugin./
);
});

it('Test importModel(stream, device, config: string) throws', () => {
it('Test importModelSync(stream, device, config: string) throws', () => {
const testString = 'test';
assert.throws(
() => core.importModelSync(userStream, 'CPU', testString),
/Passed Napi::Value must be an object./
);
});

it('Test importModel(stream, device, config: unsupported property) throws', () => {
it('Test importModelSync(stream, device, config: unsupported property) \
throws', () => {
const tmpDir = '/tmp';
assert.throws(
() => core.importModelSync(userStream, 'CPU', { 'CACHE_DIR': tmpDir }),
/Unsupported property CACHE_DIR by CPU plugin./
);
});

it('Test importModel(stream, device)', () => {
  // Return the promise so mocha awaits it — otherwise the test finishes
  // before the assertions run and a failure could never be reported.
  return core.importModel(userStream, 'CPU').then(newCompiled => {
    const newInferRequest = newCompiled.createInferRequest();
    const res2 = newInferRequest.infer([tensor]);
    assert.deepStrictEqual(res1['fc_out'].data[0], res2['fc_out'].data[0]);
  });
});

it('Test importModel(stream, device, config)', () => {
  // Return the promise so mocha awaits it — otherwise the test finishes
  // before the assertions run and a failure could never be reported.
  return core.importModel(userStream, 'CPU', { 'NUM_STREAMS': 1 }).then(
    newCompiled => {
      const newInferRequest = newCompiled.createInferRequest();
      const res2 = newInferRequest.infer([tensor]);

      assert.deepStrictEqual(res1['fc_out'].data[0], res2['fc_out'].data[0]);
    });
});

it('Test importModel(stream, device) throws', () => {
  // Signature validation fails synchronously, before any promise is created.
  const expectedMsg = /'importModel' method called with incorrect parameters./;
  assert.throws(() => core.importModel(epsilon, 'CPU').then(), expectedMsg);
});

it('Test importModel(stream, device) throws', () => {
  // Signature validation fails synchronously, before any promise is created.
  const expectedMsg = /'importModel' method called with incorrect parameters./;
  assert.throws(() => core.importModel(userStream, tensor).then(), expectedMsg);
});

it('Test importModel(stream, device, config: string) throws', () => {
  // Signature validation fails synchronously, before any promise is created.
  const testString = 'test';
  const expectedMsg = /'importModel' method called with incorrect parameters./;
  assert.throws(
    () => core.importModel(userStream, 'CPU', testString).then(),
    expectedMsg
  );
});

});

0 comments on commit a3a2b8b

Please sign in to comment.