Update nGraph to v0.22.1 (#1582)
* Update nGraph to 0.21 and adjust the EP

* Share the graph initializers between custom ops

* Update nGraph to 0.22 and exclude Gather entirely

* Enable building on Windows with nGraph v0.21.1-rc.0

* Disable the unsigned input Shrink op tests for nGraph until the next update

* Line-shortening code refactor

* Fix for the master branch merge artifact

* MKLDNN patches adjustment for Windows

* Exclude MatMulInteger for non-const zero points

* Exclude ConvInteger for non-const zero points

* Enable full Cast op support

* Use the v0.22.1 tag

* Skip ConvTranspose_InvalidKernelShape test for ngraph provider

* Create sub-graph ModelProto from fused_node
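
The last item above refers to the EP wrapping each fused sub-graph in a standalone ONNX model before handing it to NGRAPHCustomOp, whose constructor (see the diffs below) takes that model as its model_proto argument. The following is only a rough sketch of that step using ONNX's generated protobuf API; the helper name and the way the GraphProto and version numbers are obtained are illustrative assumptions, not the EP's actual code.

    // Hedged sketch: wrap a fused sub-graph in a self-contained ModelProto.
    // MakeSubgraphModel is a hypothetical helper; the real EP derives the
    // GraphProto and version numbers from the fused node / parent model.
    #include <onnx/onnx_pb.h>

    ONNX_NAMESPACE::ModelProto MakeSubgraphModel(const ONNX_NAMESPACE::GraphProto& subgraph,
                                                 int64_t ir_version,
                                                 int64_t opset_version) {
      ONNX_NAMESPACE::ModelProto model_proto;
      model_proto.set_ir_version(ir_version);    // carry over the parent model's IR version
      auto* opset = model_proto.add_opset_import();
      opset->set_domain("");                     // default ONNX operator domain
      opset->set_version(opset_version);         // carry over the parent model's opset
      *model_proto.mutable_graph() = subgraph;   // copy the fused sub-graph in
      return model_proto;
    }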
Tomasz Dołbniak authored and jywu-msft committed Aug 11, 2019
1 parent 7be40b2 commit 69baf9e
Showing 10 changed files with 218 additions and 270 deletions.
8 changes: 4 additions & 4 deletions cmake/external/ngraph.cmake
@@ -11,7 +11,7 @@ set(ngraph_SRC ${CMAKE_CURRENT_BINARY_DIR}/ngraph/src/project_ngraph)
 set(prebuilt_ONNX_SOURCE_DIR "${PROJECT_SOURCE_DIR}/external/onnx")
 set(prebuilt_ONNX_BINARY_DIR "${CMAKE_CURRENT_BINARY_DIR}/onnx")
 set(ngraph_URL "https://github.com/NervanaSystems/ngraph.git")
-set(ngraph_TAG "v0.18.1")
+set(ngraph_TAG "v0.22.1")
 
 # Libraries for python package.
 if (WIN32)
@@ -42,7 +42,7 @@ else()
 endif()
 
 # discard prior changes due to unblock incremental builds.
-set(NGRAPH_PATCH_DISCARD_COMMAND cd ${ngraph_SRC} && git checkout -- .)
+set(NGRAPH_PATCH_DISCARD_COMMAND cd ${ngraph_SRC} && git reset HEAD --hard && git clean -fx)
 
 if (MSVC)
   set(prebuilt_ONNX_BINARY_DIR "${CMAKE_CURRENT_BINARY_DIR}/onnx/${CMAKE_BUILD_TYPE}")
@@ -54,12 +54,12 @@ if (MSVC)
     PREFIX ngraph
     GIT_REPOSITORY ${ngraph_URL}
     GIT_TAG ${ngraph_TAG}
+    GIT_CONFIG core.autocrlf=input
     PATCH_COMMAND ${NGRAPH_PATCH_DISCARD_COMMAND}
     COMMAND ${CMAKE_COMMAND} -E copy ${PROJECT_SOURCE_DIR}/patches/ngraph/ngraph_onnx.cmake ${ngraph_SRC}/cmake/external_onnx.cmake
     COMMAND git apply --ignore-space-change --ignore-whitespace ${PROJECT_SOURCE_DIR}/patches/ngraph/ngraph_protobuf.patch
-    COMMAND git apply --ignore-space-change --ignore-whitespace ${PROJECT_SOURCE_DIR}/patches/ngraph/ngraph_fix_install_error.patch
-    COMMAND git apply --ignore-space-change --ignore-whitespace ${PROJECT_SOURCE_DIR}/patches/ngraph/ngraph_fix_library_path.patch
     COMMAND git apply --ignore-space-change --ignore-whitespace ${PROJECT_SOURCE_DIR}/patches/ngraph/ngraph_fix_memory.patch
+    COMMAND git apply --ignore-space-change --ignore-whitespace ${PROJECT_SOURCE_DIR}/patches/ngraph/ngraph_fix_mkldnn_missing_symbol.patch
     CMAKE_ARGS
       -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}
       -DNGRAPH_DEX_ONLY=ON
127 changes: 0 additions & 127 deletions cmake/patches/ngraph/ngraph_fix_install_error.patch

This file was deleted.

33 changes: 0 additions & 33 deletions cmake/patches/ngraph/ngraph_fix_library_path.patch

This file was deleted.

64 changes: 64 additions & 0 deletions cmake/patches/ngraph/ngraph_fix_mkldnn_missing_symbol.patch
@@ -0,0 +1,64 @@
cmake/external_mkldnn.cmake | 1 +
cmake/mkldnn_fix_missing_symbol.patch | 99 +++++++++++++++++++++++++++++++++++
2 files changed, 100 insertions(+)
create mode 100644 cmake/mkldnn_fix_missing_symbol.patch

diff --git a/cmake/external_mkldnn.cmake b/cmake/external_mkldnn.cmake
index 7874aca76..bbae6d1a4 100644
--- a/cmake/external_mkldnn.cmake
+++ b/cmake/external_mkldnn.cmake
@@ -194,7 +194,8 @@ if (WIN32)
CONFIGURE_COMMAND
PATCH_COMMAND ${MKLDNN_PATCH_REVERT_COMMAND}
COMMAND git apply --ignore-space-change --ignore-whitespace ${CMAKE_SOURCE_DIR}/cmake/${MKLDNN_PATCH_FILE}
COMMAND git apply --ignore-space-change --ignore-whitespace ${CMAKE_SOURCE_DIR}/cmake/mkldnn_fix_memory.patch
+ COMMAND git apply --ignore-space-change --ignore-whitespace ${CMAKE_SOURCE_DIR}/cmake/mkldnn_fix_missing_symbol.patch
CMAKE_GENERATOR ${CMAKE_GENERATOR}
CMAKE_GENERATOR_PLATFORM ${CMAKE_GENERATOR_PLATFORM}
CMAKE_GENERATOR_TOOLSET ${CMAKE_GENERATOR_TOOLSET}
diff --git a/cmake/mkldnn_fix_missing_symbol.patch b/cmake/mkldnn_fix_missing_symbol.patch
new file mode 100644
index 000000000..ea1a3bd61
--- /dev/null
+++ b/cmake/mkldnn_fix_missing_symbol.patch
@@ -0,0 +1,40 @@
+commit d485a54ac2b07b7349dabd833961415315a18fea
+Author: Denis Samoilov <[email protected]>
+Date: Sun Apr 14 20:11:33 2019 -0700
+
+ cpu: gemv: fix unresolved symbol
+
+ Fixes #456
+
+diff --git a/src/cpu/gemm/gemm_driver.cpp b/src/cpu/gemm/gemm_driver.cpp
+index 0773b212..df7bc44d 100644
+--- a/src/cpu/gemm/gemm_driver.cpp
++++ b/src/cpu/gemm/gemm_driver.cpp
+@@ -1304,10 +1304,8 @@ static mkldnn_status_t gemm_threading_driver(
+ (float *) arg->co);
+ }
+
+- if (data_traits<a_type>::data_type == data_type::s8) {
+- if (gemm_s8u8s32_jump_to_gemv_s8u8s32(arg)) {
+- return mkldnn_success;
+- }
++ if (gemm_s8u8s32_jump_to_gemv_s8u8s32(arg)) {
++ return mkldnn_success;
+ }
+
+ int nthr = (mkldnn_in_parallel()) ? 1 : mkldnn_get_max_threads();
+diff --git a/src/cpu/gemm/s8x8s32/jit_avx512_core_gemv_s8u8s32.cpp b/src/cpu/gemm/s8x8s32/jit_avx512_core_gemv_s8u8s32.cpp
+index 73d50e40..81646a43 100644
+--- a/src/cpu/gemm/s8x8s32/jit_avx512_core_gemv_s8u8s32.cpp
++++ b/src/cpu/gemm/s8x8s32/jit_avx512_core_gemv_s8u8s32.cpp
+@@ -29,6 +29,10 @@ namespace cpu {
+ template <typename T>
+ int gemm_s8u8s32_jump_to_gemv_s8u8s32(T *arg);
+
++template <>
++int gemm_s8u8s32_jump_to_gemv_s8u8s32(
++ gemm_info_t<float, float, float> *arg) { return 0; }
++
+ template <>
+ int gemm_s8u8s32_jump_to_gemv_s8u8s32(
+ gemm_info_t<int8_t, uint8_t, int32_t> *arg) {
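
For context, the unresolved symbol comes from the fact that gemm_s8u8s32_jump_to_gemv_s8u8s32 is only declared as a template and defined for the int8/uint8/int32 argument type, while the driver's guarded call is still instantiated for the float-typed gemm_info_t, so the reference to the undefined specialization fails at link time. The patch adds a trivial float specialization (returning 0, i.e. no GEMV shortcut) and drops the now-redundant runtime type check. The standalone example below, with hypothetical names, reproduces the same linkage pattern.

    // Standalone illustration (hypothetical names) of the linker issue the
    // MKL-DNN patch fixes: a function template is declared, but only one
    // explicit specialization is defined, so any other instantiation that is
    // odr-used produces an unresolved symbol at link time.
    #include <cstdint>
    #include <iostream>

    template <typename T>
    int jump_to_special_kernel(T* arg);          // declaration only, no generic definition

    template <>
    int jump_to_special_kernel(int8_t* arg) {    // the only specialization originally defined
      return 1;
    }

    // What the patch effectively adds for the float-typed GEMM arguments:
    // a trivial specialization meaning "no special path", so the call links.
    template <>
    int jump_to_special_kernel(float* arg) {
      return 0;
    }

    int main() {
      int8_t i8 = 0;
      float f = 0.0f;
      // Without the float specialization above, this second call would compile
      // but fail to link with an unresolved-symbol error.
      std::cout << jump_to_special_kernel(&i8) << ' ' << jump_to_special_kernel(&f) << '\n';
      return 0;
    }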
40 changes: 22 additions & 18 deletions onnxruntime/core/providers/ngraph/ngraph_custom_op.cc
@@ -35,10 +35,11 @@ static bool check_ngraph_dump_ops() {
 #endif
 }
 
-NGRAPHCustomOp::NGRAPHCustomOp(const ComputeContext* context, const ONNX_NAMESPACE::ModelProto& model_proto,
-                               const std::shared_ptr<ngraph::runtime::Backend>& ng_backend)
-    : ng_backend_{ng_backend},
-      model_proto_{model_proto} {
+NGRAPHCustomOp::NGRAPHCustomOp(const ComputeContext* context,
+                               const ONNX_NAMESPACE::ModelProto& model_proto,
+                               const std::shared_ptr<ngraph::runtime::Backend>& ng_backend) :
+    ng_backend_{ng_backend}, model_proto_{model_proto}
+{
   allocate_func_ = context->allocate_func;
   release_func_ = context->release_func;
   allocator_ = context->allocator_handle;
@@ -59,7 +60,6 @@ NGRAPHCustomOp::~NGRAPHCustomOp() {
 //This method gets called in critical path of execution: Optimize
 void NGRAPHCustomOp::Initialize(const OrtCustomOpApi* api, OrtKernelContext* context) const {
   Ort::CustomOpApi ort{*api};
-  LOGS_DEFAULT(INFO) << "nGraph compiling customOp: " << name_;
 
   size_t num_inputs = ort.KernelContext_GetInputCount(context);
 
@@ -88,6 +88,9 @@ void NGRAPHCustomOp::Initialize(const OrtCustomOpApi* api, OrtKernelContext* con
     return;
   } else {
     auto graph_proto = model_proto_.mutable_graph();
+
+    LOGS_DEFAULT(INFO) << "[NGRAPHCustomOp] Compiling customOp: " << name_;
+
     // Clear previous shapes if any and set new input shapes
     for (size_t i = 0; i < num_inputs; i++) {
       auto g_in_shape = graph_proto->mutable_input((int)i)->mutable_type()->mutable_tensor_type()->mutable_shape();
@@ -108,12 +111,12 @@ void NGRAPHCustomOp::Initialize(const OrtCustomOpApi* api, OrtKernelContext* con
   try {
     ng_function = ngraph::onnx_import::import_onnx_model(model_stream);
   } catch (const std::exception& exp) {
-    LOGS_DEFAULT(FATAL) << "[" << name_ << "] "
-                        << "Exception while converting onnx to nGraph: " << std::string(exp.what());
+    LOGS_DEFAULT(FATAL) << "[NGRAPHCustomOp] " << " - " << name_ << " - "
+                        << "Exception while importing model to nGraph: " << std::string(exp.what());
     throw;
   } catch (...) {
-    LOGS_DEFAULT(FATAL) << "[" << name_ << "] "
-                        << "Unknown exception while converting onnx to nGraph";
+    LOGS_DEFAULT(FATAL) << "[NGRAPHCustomOp] " << " - " << name_ << " - "
+                        << "Unknown exception while importing model to nGraph";
     throw;
   }
 
@@ -125,9 +128,10 @@ void NGRAPHCustomOp::Initialize(const OrtCustomOpApi* api, OrtKernelContext* con
   try {
     ng_curr_exe_ = ng_backend_->compile(ng_function);
   } catch (const std::exception& exp) {
-    LOGS_DEFAULT(FATAL) << "Exception while compiling nGraph Op: " << name_ << std::string(exp.what());
+    LOGS_DEFAULT(FATAL) << "[NGRAPHCustomOp] " << " - " << name_ << " - "
+                        << "Exception while compiling ngraph::Function: " << std::string(exp.what());
   } catch (...) {
-    LOGS_DEFAULT(FATAL) << "Unknown exception while compiling nGraph Op: " << name_;
+    LOGS_DEFAULT(FATAL) << "[NGRAPHCustomOp] " << " - " << name_ << " - " << "Unknown exception while compiling ngraph::Function";
   }
   it.first->second = ng_curr_exe_;
 }
@@ -157,9 +161,9 @@ Status NGRAPHCustomOp::Compute(const OrtCustomOpApi* api, OrtKernelContext* cont
       ng_inputs.emplace_back(ng_backend_->create_tensor(ng_param->get_output_element_type(0), ng_param->get_output_shape(0), input_data));
     }
   } catch (const std::exception& exp) {
-    return ORT_MAKE_STATUS(ONNXRUNTIME, FAIL, "Exception while copying input data to nGraph: " + std::string(exp.what()));
+    return ORT_MAKE_STATUS(ONNXRUNTIME, FAIL, name_ + ": Exception while copying input data to nGraph: " + std::string(exp.what()));
   } catch (...) {
-    return ORT_MAKE_STATUS(ONNXRUNTIME, FAIL, "Unknown exception while copying input data to nGraph");
+    return ORT_MAKE_STATUS(ONNXRUNTIME, FAIL, name_ + ": Unknown exception while copying input data to nGraph");
   }
 
   // Initialize output tensors
@@ -176,19 +180,19 @@ Status NGRAPHCustomOp::Compute(const OrtCustomOpApi* api, OrtKernelContext* cont
       ng_outputs.emplace_back(ng_backend_->create_tensor(dtype, shape, output_data));
     }
   } catch (const std::exception& exp) {
-    return ORT_MAKE_STATUS(ONNXRUNTIME, FAIL, "Exception while creating nGraph output Tensor: " + std::string(exp.what()));
+    return ORT_MAKE_STATUS(ONNXRUNTIME, FAIL, name_ + ": Exception while creating nGraph output Tensor: " + std::string(exp.what()));
   } catch (...) {
-    return ORT_MAKE_STATUS(ONNXRUNTIME, FAIL, "Unknown exception while creating nGraph output Tensor");
+    return ORT_MAKE_STATUS(ONNXRUNTIME, FAIL, name_ + ": Unknown exception while creating nGraph output Tensor");
   }
 
   // Run the graph through nGraph.
   try {
     if (!ng_curr_exe_->call(ng_outputs, ng_inputs))
-      return ORT_MAKE_STATUS(ONNXRUNTIME, FAIL, "Error while executing nGraph computation");
+      return ORT_MAKE_STATUS(ONNXRUNTIME, FAIL, name_ + ": Error while executing nGraph computation");
   } catch (const std::exception& exp) {
-    return ORT_MAKE_STATUS(ONNXRUNTIME, FAIL, "Exception while executing nGraph computation: " + std::string(exp.what()));
+    return ORT_MAKE_STATUS(ONNXRUNTIME, FAIL, name_ + ": Exception while executing nGraph computation: " + std::string(exp.what()));
   } catch (...) {
-    return ORT_MAKE_STATUS(ONNXRUNTIME, FAIL, "Unknown exception while executing nGraph computation");
+    return ORT_MAKE_STATUS(ONNXRUNTIME, FAIL, name_ + ": Unknown exception while executing nGraph computation");
   }
 
   return Status::OK();
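
The line it.first->second = ng_curr_exe_; above reflects a compile-once cache in Initialize: the executable compiled for the current input shapes is stored in a map, and ng_backend_->compile(...) is only invoked on a cache miss. A minimal sketch of that pattern follows; the key construction and all class and member names are illustrative assumptions rather than the EP's actual bookkeeping.

    // Hedged sketch of a shape-keyed compilation cache; the key format and all
    // names here are assumptions used only to illustrate the pattern.
    #include <cstdint>
    #include <map>
    #include <memory>
    #include <sstream>
    #include <string>
    #include <vector>

    struct Executable {};  // stand-in for the backend's compiled-executable type

    class CompiledCache {
     public:
      std::shared_ptr<Executable> GetOrCompile(const std::vector<std::vector<int64_t>>& input_shapes) {
        // Serialize the concrete input shapes into a lookup key.
        std::ostringstream key;
        for (const auto& shape : input_shapes) {
          for (int64_t dim : shape) key << dim << ',';
          key << ';';
        }
        // emplace() returns {iterator, inserted}: compile only on a cache miss.
        auto it = cache_.emplace(key.str(), nullptr);
        if (it.second) {
          it.first->second = Compile(input_shapes);  // mirrors "it.first->second = ng_curr_exe_;"
        }
        return it.first->second;
      }

     private:
      std::shared_ptr<Executable> Compile(const std::vector<std::vector<int64_t>>&) {
        return std::make_shared<Executable>();  // placeholder for backend->compile(function)
      }

      std::map<std::string, std::shared_ptr<Executable>> cache_;
    };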
4 changes: 3 additions & 1 deletion onnxruntime/core/providers/ngraph/ngraph_custom_op.h
@@ -25,7 +25,9 @@ namespace ngraph_ep {
 
 class NGRAPHCustomOp {
  public:
-  NGRAPHCustomOp(const ComputeContext* context, const ONNX_NAMESPACE::ModelProto& model_proto, const std::shared_ptr<ngraph::runtime::Backend>& ng_backend);
+  NGRAPHCustomOp(const ComputeContext* context,
+                 const ONNX_NAMESPACE::ModelProto& model_proto,
+                 const std::shared_ptr<ngraph::runtime::Backend>& ng_backend);
 
   Status Compute(const OrtCustomOpApi* api, OrtKernelContext* context) const;
 
(Diffs for the remaining changed files were not loaded on this page.)
