Skip to content
This repository has been archived by the owner on Nov 17, 2023. It is now read-only.

MXNet external operators #18904

Closed
wants to merge 49 commits into from
Closed
Show file tree
Hide file tree
Changes from 44 commits
Commits
Show all changes
49 commits
Select commit Hold shift + click to select a range
4fea36d
initial commit
Aug 11, 2020
6d5fce2
Merge branch 'master' of https://github.com/apache/incubator-mxnet in…
Aug 15, 2020
3cea397
split lib_api.h into lib_api.cc, updated examples for 2.0/gluon
Aug 16, 2020
ead2684
fixed licenses
Aug 16, 2020
51ce458
whitespace
Aug 16, 2020
1ec330b
whitespace
Aug 16, 2020
bee854b
modernize
Aug 16, 2020
c36363e
fix modernize
Aug 16, 2020
53b4136
fix modernize
Aug 16, 2020
2953891
fix modernize
Aug 16, 2020
0c0cceb
fixed move
Aug 16, 2020
7cbc99b
added lib_api.cc to CMakeLists.txt for example libs
Aug 16, 2020
6965cd7
working example
Aug 16, 2020
db84377
Merge branch 'master' of https://github.com/apache/incubator-mxnet in…
Aug 17, 2020
42d00d0
remove GLOBAL to fix protobuf issue
Aug 17, 2020
2379eed
fixed library unload
Aug 17, 2020
afa87a7
added test target
Aug 17, 2020
b2f5a19
fixed sanity
Aug 17, 2020
68a3733
changed destructor to default
Aug 17, 2020
f90d8ad
Merge branch 'master' of https://github.com/apache/incubator-mxnet in…
Aug 18, 2020
04e88fb
added /LD option for customop_gpu_lib target
Aug 19, 2020
f4aaa84
Merge branch 'master' of https://github.com/apache/incubator-mxnet in…
Aug 19, 2020
b9f67ef
moved /LD inside the <>
Aug 19, 2020
4b9a4dc
diff compile flags for relu_lib.cu and lib_api.cc
Aug 19, 2020
4afe182
set CMAKE_VERBOSE_MAKEFILE for debug
Aug 20, 2020
7a36a40
added -v to ninja
Aug 20, 2020
8607847
added /MT
Aug 20, 2020
4165d02
another try
Aug 20, 2020
55b441f
Merge branch 'master' of https://github.com/apache/incubator-mxnet in…
Aug 20, 2020
f7de08e
changed /MT to -MT
Aug 20, 2020
4b7d119
set flags for cxx separately
Aug 20, 2020
c3719fd
split /LD /MT flags
Aug 20, 2020
1a79284
Merge branch 'master' of https://github.com/apache/incubator-mxnet in…
Aug 20, 2020
f6b9082
Merge branch 'master' of https://github.com/apache/incubator-mxnet in…
Aug 21, 2020
4335985
refactored cuda APIs into header file
Aug 21, 2020
1ef7b0a
removed debugging stuff
Aug 21, 2020
d7e241b
Merge branch 'master' of https://github.com/apache/incubator-mxnet in…
Aug 24, 2020
cac8fba
updated instructions for gpu build
Aug 25, 2020
36e0a6a
moved building into cmakelists
Aug 25, 2020
caaa011
moved build stuff into separate CMakeLists.txt
Aug 25, 2020
27c46d7
fixed gpu example
Aug 25, 2020
591141a
fixed license
Aug 25, 2020
0a4621d
added dlmc library dependency
Aug 25, 2020
c2e534b
added nnvm dependency
Aug 25, 2020
fd9f836
removed nnvm dmlc dependencies, added WINDOWS_EXPORT_ALL_SYMBOLS option
Aug 25, 2020
545aff6
fixed WINDOWS_EXPORT_ALL_SYMBOLS
Aug 25, 2020
df85c38
changed nnvm to shared library
Aug 26, 2020
bc80960
backed out external ops changes
Aug 26, 2020
db438ec
Revert "backed out external ops changes"
Aug 26, 2020
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 12 additions & 7 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -700,18 +700,21 @@ endif()
target_compile_definitions(mxnet PUBLIC DMLC_LOG_FATAL_THROW=$<BOOL:${LOG_FATAL_THROW}>)

# extension libraries (custom operators, custom subgraphs) are built by default
add_library(customop_lib SHARED ${CMAKE_CURRENT_SOURCE_DIR}/example/extensions/lib_custom_op/gemm_lib.cc)
add_library(transposecsr_lib SHARED ${CMAKE_CURRENT_SOURCE_DIR}/example/extensions/lib_custom_op/transposecsr_lib.cc)
add_library(transposerowsp_lib SHARED ${CMAKE_CURRENT_SOURCE_DIR}/example/extensions/lib_custom_op/transposerowsp_lib.cc)
add_library(subgraph_lib SHARED ${CMAKE_CURRENT_SOURCE_DIR}/example/extensions/lib_subgraph/subgraph_lib.cc)
add_library(pass_lib SHARED ${CMAKE_CURRENT_SOURCE_DIR}/example/extensions/lib_pass/pass_lib.cc)
add_library(customop_lib SHARED ${CMAKE_CURRENT_SOURCE_DIR}/example/extensions/lib_custom_op/gemm_lib.cc ${CMAKE_CURRENT_SOURCE_DIR}/src/lib_api.cc)
add_library(transposecsr_lib SHARED ${CMAKE_CURRENT_SOURCE_DIR}/example/extensions/lib_custom_op/transposecsr_lib.cc ${CMAKE_CURRENT_SOURCE_DIR}/src/lib_api.cc)
add_library(transposerowsp_lib SHARED ${CMAKE_CURRENT_SOURCE_DIR}/example/extensions/lib_custom_op/transposerowsp_lib.cc ${CMAKE_CURRENT_SOURCE_DIR}/src/lib_api.cc)
add_library(subgraph_lib SHARED ${CMAKE_CURRENT_SOURCE_DIR}/example/extensions/lib_subgraph/subgraph_lib.cc ${CMAKE_CURRENT_SOURCE_DIR}/src/lib_api.cc)
add_library(pass_lib SHARED ${CMAKE_CURRENT_SOURCE_DIR}/example/extensions/lib_pass/pass_lib.cc ${CMAKE_CURRENT_SOURCE_DIR}/src/lib_api.cc)

add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/example/extensions/lib_external_ops ${CMAKE_CURRENT_SOURCE_DIR}/example/extensions/lib_external_ops/build)

target_include_directories(customop_lib PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/include/mxnet)
target_include_directories(transposecsr_lib PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/include/mxnet)
target_include_directories(transposerowsp_lib PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/include/mxnet)
target_include_directories(subgraph_lib PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/include/mxnet)
target_include_directories(pass_lib PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/include/mxnet)
if(USE_CUDA)
add_library(customop_gpu_lib SHARED ${CMAKE_CURRENT_SOURCE_DIR}/example/extensions/lib_custom_op/relu_lib.cu)
add_library(customop_gpu_lib SHARED ${CMAKE_CURRENT_SOURCE_DIR}/example/extensions/lib_custom_op/relu_lib.cu ${CMAKE_CURRENT_SOURCE_DIR}/src/lib_api.cc)
samskalicky marked this conversation as resolved.
Show resolved Hide resolved
target_include_directories(customop_gpu_lib PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/include/mxnet)
endif()
if(UNIX)
Expand All @@ -730,7 +733,9 @@ elseif(MSVC)
set_target_properties(subgraph_lib PROPERTIES PREFIX "lib")
set_target_properties(pass_lib PROPERTIES PREFIX "lib")
if(USE_CUDA)
target_compile_options(customop_gpu_lib PUBLIC "$<$<COMPILE_LANGUAGE:CUDA>:-Xcompiler=-fPIC>")
target_compile_options(customop_gpu_lib PRIVATE "$<$<COMPILE_LANGUAGE:CUDA>:-Xcompiler=-LD -MT>")
target_compile_options(customop_gpu_lib PRIVATE "$<$<COMPILE_LANGUAGE:CXX>:/LD>")
target_compile_options(customop_gpu_lib PRIVATE "$<$<COMPILE_LANGUAGE:CXX>:/MT>")
set_target_properties(customop_gpu_lib PROPERTIES PREFIX "lib")
endif()
endif()
Expand Down
16 changes: 10 additions & 6 deletions config/linux_gpu.cmake
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,8 @@
#
# $ cp config/linux_gpu.cmake config.cmake
#
# Next modify the according entries, and then compile by
# Next, modify the relevant entries in config.cmake (e.g. set MXNET_CUDA_ARCH to
# select the specific GPU architectures to build for), and then compile by
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Thanks @leezu for that feedback, how about this where we point out that users might want to set the MXNET_CUDA_ARCH when using the linux_gpu.cmake file. And then let them refer below for the specific details. This at least points out that they might need to do something with MXNET_CUDA_ARCH in order to build for GPU

#
# $ mkdir build; cd build
# $ cmake ..
Expand All @@ -42,15 +43,18 @@ set(USE_CUDA ON CACHE BOOL "Build with CUDA support")
set(USE_CUDNN ON CACHE BOOL "Build with cudnn support, if found")

# Target NVIDIA GPU architecture.
# Valid options are "Auto" for autodetection, "All" for all available
# architectures or a list of architectures by compute capability number, such as
# "7.0" or "7.0;7.5" as well as name, such as "Volta" or "Volta;Turing".
# Valid options are:
# - "Auto" for autodetection, will try and discover which GPU architecture to use by
# looking at the available GPUs on the machine that you're building on
# - "All" for all available GPU architectures supported by the version of CUDA installed
# - "specific GPU architectures" by giving the compute capability number such as
# "7.0" or "7.0;7.5" (i.e. sm_70 or sm_75) or you can specify the name like:
# "Volta" or "Volta;Turing".
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@leezu I tried to merge my version which what was there before. Hows this?

# The value specified here is passed to cmake's CUDA_SELECT_NVCC_ARCH_FLAGS to
# obtain the compilation flags for nvcc.
#
# When compiling on a machine without GPU, autodetection will fail and you
# should instead specify the target architecture manually to avoid excessive
# compilation times.
# should instead specify the target architecture manually.
set(MXNET_CUDA_ARCH "Auto" CACHE STRING "Target NVIDIA GPU achitecture")

#---------------------------------------------
Expand Down
2 changes: 1 addition & 1 deletion example/extensions/lib_api/Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
# under the License.

all:
g++ -std=c++11 -shared -fPIC init_lib.cc -o libinit_lib.so -I ../../../include/mxnet
g++ -std=c++11 -shared -fPIC init_lib.cc ../../../src/lib_api.cc -o libinit_lib.so -I ../../../include

test:
g++ -std=c++11 -O3 -o libtest libtest.cc -ldl -I ../../../include/mxnet
Expand Down
2 changes: 1 addition & 1 deletion example/extensions/lib_api/init_lib.cc
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
*/

#include <iostream>
#include "lib_api.h"
#include "mxnet/lib_api.h"

using namespace mxnet::ext;

Expand Down
8 changes: 4 additions & 4 deletions example/extensions/lib_custom_op/Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -18,16 +18,16 @@
all: gemm_lib relu_lib transposecsr_lib transposerowsp_lib

gemm_lib:
g++ -shared -fPIC -std=c++11 gemm_lib.cc -o libgemm_lib.so -I ../../../include/mxnet
g++ -shared -fPIC -std=c++11 gemm_lib.cc ../../../src/lib_api.cc -o libgemm_lib.so -I ../../../include

relu_lib:
nvcc -shared -std=c++11 -Xcompiler -fPIC relu_lib.cu -o librelu_lib.so -I ../../../include/mxnet
nvcc -shared -std=c++11 -Xcompiler -fPIC relu_lib.cu ../../../src/lib_api.cc -o librelu_lib.so -I ../../../include

transposecsr_lib:
g++ -shared -fPIC -std=c++11 transposecsr_lib.cc -o libtransposecsr_lib.so -I ../../../include/mxnet
g++ -shared -fPIC -std=c++11 transposecsr_lib.cc ../../../src/lib_api.cc -o libtransposecsr_lib.so -I ../../../include

transposerowsp_lib:
g++ -shared -fPIC -std=c++11 transposerowsp_lib.cc -o libtransposerowsp_lib.so -I ../../../include/mxnet
g++ -shared -fPIC -std=c++11 transposerowsp_lib.cc ../../../src/lib_api.cc -o libtransposerowsp_lib.so -I ../../../include

clean:
rm -rf libgemm_lib.so librelu_lib.so libtransposecsr_lib.so libtransposerowsp_lib.so
2 changes: 1 addition & 1 deletion example/extensions/lib_custom_op/gemm_lib.cc
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@

#include <iostream>
#include <utility>
#include "lib_api.h"
#include "mxnet/lib_api.h"

using namespace mxnet::ext;

Expand Down
2 changes: 1 addition & 1 deletion example/extensions/lib_custom_op/relu_lib.cu
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
*/

#include <iostream>
#include "lib_api.h"
#include "mxnet/lib_api.h"

using namespace mxnet::ext;

Expand Down
24 changes: 12 additions & 12 deletions example/extensions/lib_custom_op/test_transposecsr.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,25 +54,25 @@
print("indices:", c.indices.asnumpy())
print("indptr:", c.indptr.asnumpy())

print("--------start symbolic compute--------")
print("--------start Gluon compute--------")
d = mx.sym.Variable('d')
e = mx.sym.my_transposecsr(d)
f = mx.sym.my_state_transposecsr(d, test_kw=200)

exe = e.bind(ctx=mx.cpu(),args={'d':a})
exe2 = f.bind(ctx=mx.cpu(),args={'d':a})
out = exe.forward()
block = mx.gluon.nn.SymbolBlock(e, [d])
out = block(a)
print("Compute Results:")
print("data:", out[0].data.asnumpy())
print("indices:", out[0].indices.asnumpy())
print("indptr:", out[0].indptr.asnumpy())
print("data:", out.data.asnumpy())
print("indices:", out.indices.asnumpy())
print("indptr:", out.indptr.asnumpy())

out2 = exe2.forward()
out2 = exe2.forward()
block2 = mx.gluon.nn.SymbolBlock(f,[d])
out2 = block2(a)
out2 = block2(a)
print("Stateful Compute Result:")
print("data:", out2[0].data.asnumpy())
print("indices:", out2[0].indices.asnumpy())
print("indptr:", out2[0].indptr.asnumpy())
print("data:", out2.data.asnumpy())
print("indices:", out2.indices.asnumpy())
print("indptr:", out2.indptr.asnumpy())

print("--------Baseline(dense)--------")
print(mx.nd.transpose(a.tostype('default')))
26 changes: 16 additions & 10 deletions example/extensions/lib_custom_op/test_transposerowsp.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,23 +51,29 @@
print("data:", c.data.asnumpy())
print("indices:", c.indices.asnumpy())

print("--------start symbolic compute--------")
print("--------start Gluon compute--------")
d = mx.sym.Variable('d')
e = mx.sym.my_transposerowsp(d)
f = mx.sym.my_state_transposerowsp(d, test_kw=200)

exe = e.bind(ctx=mx.cpu(),args={'d':a})
exe2 = f.bind(ctx=mx.cpu(),args={'d':a})
out = exe.forward()
#exe = e.bind(ctx=mx.cpu(),args={'d':a})
block = mx.gluon.nn.SymbolBlock(e,[d])
#out = exe.forward()
out = block(a)
print("Compute Results:")
print("data:", out[0].data.asnumpy())
print("indices:", out[0].indices.asnumpy())
print(out)
print("data:", out.data.asnumpy())
print("indices:", out.indices.asnumpy())

out2 = exe2.forward()
out2 = exe2.forward()
#exe2 = f.bind(ctx=mx.cpu(),args={'d':a})
block2 = mx.gluon.nn.SymbolBlock(f,[d])
#out2 = exe2.forward()
out2 = block2(a)
#out2 = exe2.forward()
out2 = block2(a)
print("Stateful Compute Result:")
print("data:", out2[0].data.asnumpy())
print("indices:", out2[0].indices.asnumpy())
print("data:", out2.data.asnumpy())
print("indices:", out2.indices.asnumpy())

print("--------Baseline(dense)--------")
print(mx.nd.transpose(a.tostype('default')))
2 changes: 1 addition & 1 deletion example/extensions/lib_custom_op/transposecsr_lib.cc
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@

#include <iostream>
#include <utility>
#include "lib_api.h"
#include "mxnet/lib_api.h"

using namespace mxnet::ext;

Expand Down
2 changes: 1 addition & 1 deletion example/extensions/lib_custom_op/transposerowsp_lib.cc
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@

#include <iostream>
#include <utility>
#include "lib_api.h"
#include "mxnet/lib_api.h"

using namespace mxnet::ext;

Expand Down
36 changes: 36 additions & 0 deletions example/extensions/lib_external_ops/CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
# Build script for the external-operators example library.
# Produces libexternal_lib.{so,dll} from the example sources plus the required
# MXNet extension glue (lib_api.cc), linking against the targets defined by the
# parent MXNet build (mxnet, mshadow, dmlc, nnvm).

# Required + custom C++ sources.  Listed explicitly rather than via file(GLOB):
# a glob would silently miss newly added files until the next re-configure.
set(CXX_SRCS
    # Required files
    ${CMAKE_CURRENT_SOURCE_DIR}/init_lib.cc
    ${CMAKE_CURRENT_SOURCE_DIR}/../../../src/lib_api.cc
    # Your custom files
    ${CMAKE_CURRENT_SOURCE_DIR}/min_ex.cc
)

# create library & set libraries; fold the nnvm objects into the shared library
# so it is self-contained when dynamically loaded at runtime
add_library(external_lib SHARED $<TARGET_OBJECTS:nnvm> ${CXX_SRCS})
target_link_libraries(external_lib PUBLIC mshadow mxnet dmlc)

# generic GPU stuff
if(USE_CUDA)
  # specify GPU sources (optional) — pick up any .cu kernels next to this file.
  # PRIVATE: the .cu sources are compiled only into this target; PUBLIC would
  # wrongly propagate them into every consumer's source list.
  file(GLOB CU_SRCS "${CMAKE_CURRENT_SOURCE_DIR}/*.cu")
  target_sources(external_lib PRIVATE ${CU_SRCS})
endif()

if(UNIX)
  # unix-specific stuff: nothing extra is needed here.  add_library(... SHARED)
  # already links with -shared; passing -shared as a *compile* option (as was
  # done previously) is a no-op for object compilation and has been dropped.
elseif(MSVC)
  # windows-specific stuff: build with the DLL-creation flag (/LD) and the
  # static CRT (/MT), matching the other extension libraries in this repo
  target_compile_options(external_lib PRIVATE "$<$<COMPILE_LANGUAGE:CXX>:/LD>")
  target_compile_options(external_lib PRIVATE "$<$<COMPILE_LANGUAGE:CXX>:/MT>")
  # keep the conventional "lib" prefix so load paths match Unix builds
  set_target_properties(external_lib PROPERTIES PREFIX "lib")
  if(USE_CUDA)
    # windows+GPU-specific stuff: forward the same CRT/DLL flags through nvcc
    # to the MSVC host compiler
    target_compile_options(external_lib PRIVATE "$<$<COMPILE_LANGUAGE:CUDA>:-Xcompiler=-LD -MT>")
  endif()
endif()
38 changes: 38 additions & 0 deletions example/extensions/lib_external_ops/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
<!--- Licensed to the Apache Software Foundation (ASF) under one -->
<!--- or more contributor license agreements. See the NOTICE file -->
<!--- distributed with this work for additional information -->
<!--- regarding copyright ownership. The ASF licenses this file -->
<!--- to you under the Apache License, Version 2.0 (the -->
<!--- "License"); you may not use this file except in compliance -->
<!--- with the License. You may obtain a copy of the License at -->

<!--- http://www.apache.org/licenses/LICENSE-2.0 -->

<!--- Unless required by applicable law or agreed to in writing, -->
<!--- software distributed under the License is distributed on an -->
<!--- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -->
<!--- KIND, either express or implied. See the License for the -->
<!--- specific language governing permissions and limitations -->
<!--- under the License. -->

External Operators Example and Tutorial
=======================================

## Introduction

Extending MXNet with custom components used to mean distributing a custom fork. This feature allows adding custom components to MXNet by dynamically loading external libraries at runtime.

## Getting Started

### Have MXNet Ready

For this tutorial, clone MXNet from source and build it.

### Run An Example

This example shows compiling a custom backend operator and then dynamically loading it into MXNet at runtime. Go to the **lib_external_ops** directory and follow these steps:

1. Touch or modify the **min_ex.cc** and/or **min_ex-inl.h** file(s)
2. Go into the **build** directory that was created when building MXNet.
3. Run `make external_lib`. Notice that **libexternal_lib.so** has been rebuilt
4. Go to the **example/extensions/lib_external_ops** directory again and run `python test_loading.py`.
39 changes: 39 additions & 0 deletions example/extensions/lib_external_ops/init_lib.cc
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

/*!
* Copyright (c) 2015 by Contributors
* \file init_lib.cc
* \brief Sample library file
*/

#include <iostream>
#include "mxnet/lib_api.h"

using namespace mxnet::ext;

/*!
 * \brief Library entry point called by MXNet when the extension is loaded.
 * \param version the MXNet version number of the loading process
 * \return MX_SUCCESS when the host MXNet is new enough (>= 1.7.0,
 *         i.e. version code 10700), MX_FAIL otherwise
 */
MXReturnValue initialize(int version) {
  // Guard clause: reject hosts older than 1.7.0 up front.
  if (version < 10700) {
    MX_ERROR_MSG << "MXNet version " << version << " not supported";
    return MX_FAIL;
  }
  std::cout << "MXNet version " << version << " supported" << std::endl;
  return MX_SUCCESS;
}
Loading