[submodule] Update tvm to the latest (apache#17438)
* sync latest tvm

* Upgrade enum according to updated tvm

* fix lint

* Fix amalgamation

* fix remaining NodePtr
yzhliu authored and Ubuntu committed Feb 19, 2020
1 parent aec3e1b commit 033fba9
Showing 103 changed files with 392 additions and 393 deletions.
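Nearly all of the diff below is one mechanical rename: the TVM submodule bump renames NNVM's node handle type from `nnvm::NodePtr` to `nnvm::ObjectPtr` (part of upstream TVM's move to a unified object protocol), and every MXNet use site is updated to match. A minimal sketch of what the alias change amounts to, assuming NNVM keeps `Node` behind a `std::shared_ptr` (the authoritative definition lives in the updated submodule, nnvm/include/nnvm/node.h):

```c++
#include <memory>

namespace nnvm {

class Node;

// Before this commit the handle was spelled:
//   using NodePtr = std::shared_ptr<Node>;
// After the submodule update it is spelled (sketch, not the verbatim header):
using ObjectPtr = std::shared_ptr<Node>;

}  // namespace nnvm
```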
2 changes: 1 addition & 1 deletion 3rdparty/tvm
Submodule tvm updated from f8f4ce to 9bd2c7
@@ -95,10 +95,10 @@ class SgProperty : public SubgraphProperty {
static SubgraphPropertyPtr Create() {
return std::make_shared<SgProperty>();
}
- nnvm::NodePtr CreateSubgraphNode(
+ nnvm::ObjectPtr CreateSubgraphNode(
const nnvm::Symbol &sym, const int subgraph_id = 0) const override {
// We can use CachedOp to execute the subgraph.
- nnvm::NodePtr n = nnvm::Node::Create();
+ nnvm::ObjectPtr n = nnvm::Node::Create();
n->attrs.op = Op::Get("_CachedOp");
n->attrs.name = "ConvBN" + std::to_string(subgraph_id);
n->attrs.subgraphs.push_back(std::make_shared<nnvm::Symbol>(sym));
4 changes: 2 additions & 2 deletions docs/static_site/src/pages/api/faq/new_op.md
@@ -290,7 +290,7 @@ that the i-th input can share memory with the j-th output.
If an operator has gradient, it can be described with `FGradient` with prototype
```c++
- std::vector<nnvm::NodeEntry>(const nnvm::NodePtr& n,
+ std::vector<nnvm::NodeEntry>(const nnvm::ObjectPtr& n,
const std::vector<nnvm::NodeEntry>& ograds)
```

@@ -313,7 +313,7 @@ again. This avoids unnecessary copies of the shared_ptr.
```c++
for (size_t i = 0; i < n->inputs.size(); ++i) {
- nnvm::NodePtr node = nnvm::Node::Create();
+ nnvm::ObjectPtr node = nnvm::Node::Create();
node->attrs.op = copy_op;
node->inputs = {ograds[0]};
ret.emplace_back(std::move(node));
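To make the new_op.md prototype above concrete: a minimal sketch of an `FGradient` function under the new spelling, modeled on the `_backward_CustomFunction` pattern that appears later in this diff. The operator names `_my_op` and `_backward_my_op` are hypothetical.

```c++
#include <vector>
#include <nnvm/node.h>
#include <nnvm/op.h>

// Sketch only: FGradient for a hypothetical op whose backward pass is
// implemented by a separately registered op "_backward_my_op".
std::vector<nnvm::NodeEntry> MyOpGradient(
    const nnvm::ObjectPtr& n,
    const std::vector<nnvm::NodeEntry>& ograds) {
  nnvm::ObjectPtr g = nnvm::Node::Create();
  g->attrs.op = nnvm::Op::Get("_backward_my_op");  // assumed registered elsewhere
  g->attrs.name = n->attrs.name + "_backward";
  // The backward node consumes the output gradients first, then the
  // forward inputs, mirroring the copy order used in MXLoadLib's grad_reg.
  g->inputs = ograds;
  g->inputs.insert(g->inputs.end(), n->inputs.begin(), n->inputs.end());
  return {nnvm::NodeEntry{g, 0, 0}};
}
```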
10 changes: 5 additions & 5 deletions include/mxnet/imperative.h
@@ -62,18 +62,18 @@ class Imperative {
AGInfo() :
grad_req(kNullOp), fresh_out_grad(false) {}

- static void Clear(const nnvm::NodePtr& node) {
+ static void Clear(const nnvm::ObjectPtr& node) {
if (node == nullptr || node->info.empty()) return;
AGInfo& info = Get(node);
if (info.grad_req != kNullOp) return;
node->info.clear();
}

- static AGInfo& Get(const nnvm::NodePtr& node) {
+ static AGInfo& Get(const nnvm::ObjectPtr& node) {
return dmlc::get<AGInfo>(node->info);
}

- static AGInfo& Create(const nnvm::NodePtr& node) {
+ static AGInfo& Create(const nnvm::ObjectPtr& node) {
node->info.construct<AGInfo>();
return Get(node);
}
@@ -82,7 +82,7 @@ class Imperative {
return arr.entry_.node == nullptr || arr.entry_.node->info.empty();
}

- static bool IsVariable(const nnvm::NodePtr& node) {
+ static bool IsVariable(const nnvm::ObjectPtr& node) {
AGInfo& info = Get(node);
return info.grad_req != kNullOp && info.outputs.size() == 1
&& info.out_grads.size() == 1;
@@ -196,7 +196,7 @@ class Imperative {
}
/*! \brief find the input/output ndarrays that are needed for backward */
void GetBackwardDependency(
- const nnvm::NodePtr& node,
+ const nnvm::ObjectPtr& node,
uint32_t num_inputs, uint32_t num_outputs,
std::vector<bool> *p_save_inputs,
std::vector<bool> *p_save_outputs);
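The `AGInfo` helpers above attach autograd state to a node through its `info` field (a `dmlc::any`). A hedged sketch of the lifecycle they implement, using only the members visible in this diff:

```c++
#include <mxnet/imperative.h>

// Sketch: how autograd bookkeeping hangs off a node (types as in the diff).
void AGInfoLifecycleSketch() {
  nnvm::ObjectPtr node = nnvm::Node::Create();
  // Create() default-constructs an AGInfo inside node->info and returns it.
  mxnet::Imperative::AGInfo& info = mxnet::Imperative::AGInfo::Create(node);
  info.grad_req = mxnet::kWriteTo;    // this node now requires a gradient
  // Clear() is a no-op while grad_req != kNullOp, so the state survives:
  mxnet::Imperative::AGInfo::Clear(node);
}
```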
2 changes: 1 addition & 1 deletion include/mxnet/op_attr_types.h
@@ -329,7 +329,7 @@ using FQuantizable = std::function<QuantizeType (const NodeAttrs& attrs)>;
* \brief Register a quantized node creation function based on the attrs of the node
* \note Register under "FQuantizedOp" for non-quantized operators
*/
- using FQuantizedOp = std::function<nnvm::NodePtr (const NodeAttrs& attrs)>;
+ using FQuantizedOp = std::function<nnvm::ObjectPtr (const NodeAttrs& attrs)>;

/*!
* \brief Register a function to determine if the output of a quantized operator
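For context on how this attribute is consumed: a hedged sketch of an `FQuantizedOp` registration. The operator names (`my_op`, `_contrib_quantized_my_op`) are hypothetical; real registrations of this form live under src/operator/quantization/.

```c++
#include <mxnet/op_attr_types.h>
#include <nnvm/node.h>
#include <nnvm/op.h>

// Sketch only: map a float op to its (assumed) quantized counterpart.
NNVM_REGISTER_OP(my_op)
.set_attr<mxnet::FQuantizedOp>("FQuantizedOp",
  [](const nnvm::NodeAttrs& attrs) {
    nnvm::ObjectPtr node = nnvm::Node::Create();
    node->attrs.op = nnvm::Op::Get("_contrib_quantized_my_op");  // hypothetical
    node->attrs.name = "quantized_" + attrs.name;
    node->attrs.dict = attrs.dict;  // carry over operator parameters
    if (node->attrs.op->attr_parser) {
      node->attrs.op->attr_parser(&(node->attrs));
    }
    return node;
  });
```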
2 changes: 1 addition & 1 deletion src/c_api/c_api.cc
@@ -580,7 +580,7 @@ int MXLoadLib(const char *path) {
};

// FGradient register lambda
- auto grad_reg = [=](const nnvm::NodePtr& n, const std::vector<nnvm::NodeEntry>& ograds) {
+ auto grad_reg = [=](const nnvm::ObjectPtr& n, const std::vector<nnvm::NodeEntry>& ograds) {
// copy gradients first
std::vector<nnvm::NodeEntry> heads(ograds.begin(), ograds.end());
// copy inputs second
4 changes: 2 additions & 2 deletions src/c_api/c_api_function.cc
@@ -42,11 +42,11 @@ struct CustomFunctionParam {
};

std::vector<nnvm::NodeEntry> Gradient(
- const nnvm::NodePtr& n,
+ const nnvm::ObjectPtr& n,
const std::vector<nnvm::NodeEntry>& out_grads) {
const CustomFunctionParam& params = nnvm::get<CustomFunctionParam>(n->attrs.parsed);

- nnvm::NodePtr g = nnvm::Node::Create();
+ nnvm::ObjectPtr g = nnvm::Node::Create();
g->attrs.op = nnvm::Op::Get("_backward_CustomFunction");
g->attrs.name = n->attrs.name + "_backward";
g->attrs.parsed = params;
10 changes: 5 additions & 5 deletions src/c_api/c_api_symbolic.cc
@@ -386,7 +386,7 @@ int MXSymbolCutSubgraph(SymbolHandle sym, SymbolHandle **input_symbols,
const std::string &subg_name = it->second;
std::vector<nnvm::NodeEntry *> input_entries;
DFSVisit(s->outputs, [&subg_attr, &subg_name, &input_entries]
- (nnvm::NodePtr n) {
+ (nnvm::ObjectPtr n) {
// If the node itself isn't in the subgraph, we ignore it.
auto it = n->attrs.dict.find(subg_attr);
if (it == n->attrs.dict.end() || it->second != subg_name)
@@ -431,7 +431,7 @@ int MXSymbolCutSubgraph(SymbolHandle sym, SymbolHandle **input_symbols,
void ConvertShapeAttrToNumPyCompatible(nnvm::Graph* g) {
if (Imperative::Get()->is_np_shape()
&& (!g->HasAttr("is_np_shape") || !g->GetAttr<int>("is_np_shape"))) {
- DFSVisit(g->outputs, [](nnvm::NodePtr n) {
+ DFSVisit(g->outputs, [](nnvm::ObjectPtr n) {
if (n->is_variable()) {
auto it = n->attrs.dict.find("__shape__");
if (it != n->attrs.dict.end()) {
@@ -1094,13 +1094,13 @@ static void _SetInputDTypes(
// if model_params is provided the function will update the dtype of only model params.
// if model_params is empty, the function will update the dtype of all nodes which had
// a prior dtype set.
- // args is a const_reference vector of NodePtrs. NodePtrs are immutable but
+ // args is a const_reference vector of ObjectPtrs. ObjectPtrs are immutable but
// the Nodes they are pointing to will be mutated in this function
static void _UpdateSymDTypeAttrs(
const std::unordered_map<std::string, int>& node_name_dtype_map,
const std::unordered_map<std::string, int>& node_without_dtype_map,
const std::unordered_set<std::string>& model_params,
- const std::vector<nnvm::NodePtr>& args) {
+ const std::vector<nnvm::ObjectPtr>& args) {
const std::string dtype_keyword = "__dtype__";

// Update args to have the right dtype attrs
@@ -1250,7 +1250,7 @@ int MXReducePrecisionSymbol(SymbolHandle sym_handle,
result_sym->outputs = g.outputs;
*ret_sym_handle = result_sym;
nnvm::Symbol *ret_sym = static_cast<nnvm::Symbol *>(*ret_sym_handle);
- const std::vector<nnvm::NodePtr>& args = ret_sym->ListInputs(nnvm::Symbol::kAll);
+ const std::vector<nnvm::ObjectPtr>& args = ret_sym->ListInputs(nnvm::Symbol::kAll);

// update symbol dtype attrs using the node name -> dtype mapping, if dtype is already set
// in the symbol, else set dtype for the model_params
10 changes: 5 additions & 5 deletions src/common/exec_utils.cc
@@ -32,12 +32,12 @@ namespace common {

void CopyGraph(nnvm::Graph *dst, const nnvm::Graph &src, bool copy_variables) {
using nnvm::Node;
- using nnvm::NodePtr;
+ using nnvm::ObjectPtr;
using nnvm::NodeEntry;
- std::unordered_map<Node*, NodePtr> old_new;
+ std::unordered_map<Node*, ObjectPtr> old_new;
// use DFSVisit to copy all the nodes
- DFSVisit(src.outputs, [&old_new, copy_variables](const NodePtr& node) {
-   NodePtr np;
+ DFSVisit(src.outputs, [&old_new, copy_variables](const ObjectPtr& node) {
+   ObjectPtr np;
if (copy_variables || !node->is_variable()) {
np = Node::Create();
np->attrs = node->attrs;
@@ -52,7 +52,7 @@ void CopyGraph(nnvm::Graph *dst, const nnvm::Graph &src, bool copy_variables) {
Node *ptr = e.node.get();
kv.second->inputs.emplace_back(NodeEntry{old_new[ptr], e.index, e.version});
}
- for (const NodePtr& p : kv.first->control_deps) {
+ for (const ObjectPtr& p : kv.first->control_deps) {
kv.second->control_deps.emplace_back(old_new[p.get()]);
}
}
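A small usage sketch for the `CopyGraph` signature shown above; the include path is illustrative:

```c++
#include <nnvm/graph.h>
#include "common/exec_utils.h"  // illustrative include path

// Deep-copy a graph's structure. With copy_variables = true, variable
// nodes are duplicated as well; with false they remain shared, so the
// copy still points at the original variable nodes.
nnvm::Graph DeepCopy(const nnvm::Graph& src) {
  nnvm::Graph dst;
  mxnet::common::CopyGraph(&dst, src, /*copy_variables=*/true);
  return dst;
}
```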
28 changes: 14 additions & 14 deletions src/executor/eliminate_common_expr_pass.cc
@@ -38,7 +38,7 @@ namespace exec {
namespace {

using nnvm::Node;
- using nnvm::NodePtr;
+ using nnvm::ObjectPtr;
using nnvm::Graph;
using nnvm::IndexedGraph;

@@ -94,12 +94,12 @@ bool NodeEqual(const Node* n, const Node* m) {
}

// Graph traversal to create a list of pairs of identical-function nodes that can be combined.
- std::vector<std::pair<NodePtr, NodePtr> > GetCommonNodes(const Graph& g) {
-   std::vector<std::pair<NodePtr, NodePtr> > ret;
+ std::vector<std::pair<ObjectPtr, ObjectPtr> > GetCommonNodes(const Graph& g) {
+   std::vector<std::pair<ObjectPtr, ObjectPtr> > ret;
// A map between a vector of inputs and those nodes that have those inputs
- std::map<std::vector<NodeInput>, std::vector<const NodePtr*> > grouped_nodes;
+ std::map<std::vector<NodeInput>, std::vector<const ObjectPtr*> > grouped_nodes;
// Traverse the graph and group the nodes by their vector of inputs
- nnvm::DFSVisit(g.outputs, [&grouped_nodes](const NodePtr& n) {
+ nnvm::DFSVisit(g.outputs, [&grouped_nodes](const ObjectPtr& n) {
if (n->inputs.size() != 0) {
grouped_nodes[ConvertInputs(n->inputs)].push_back(&n);
}
@@ -116,8 +116,8 @@ std::vector<std::pair<NodePtr, NodePtr> > GetCommonNodes(const Graph& g) {
// be eliminated in favor of the other Node (the 'src').
if (NodeEqual(node_group[i]->get(), node_group[j]->get())) {
visited.insert(j);
- NodePtr src = *node_group[i];
- NodePtr replaced = *node_group[j];
+ ObjectPtr src = *node_group[i];
+ ObjectPtr replaced = *node_group[j];
ret.emplace_back(src, replaced);
}
}
@@ -131,14 +131,14 @@ std::vector<std::pair<NodePtr, NodePtr> > GetCommonNodes(const Graph& g) {
* \brief Do a single pass of Node elimination given pairs of identical Nodes.
*/
void EliminateCommonNodes(Graph* g,
- const std::vector<std::pair<NodePtr, NodePtr> >& common_nodes) {
+ const std::vector<std::pair<ObjectPtr, ObjectPtr> >& common_nodes) {
for (const auto &p : common_nodes) {
- std::vector <NodePtr> nodes_to_change;
- const NodePtr &src = p.first;
- const NodePtr &replaced = p.second;
+ std::vector <ObjectPtr> nodes_to_change;
+ const ObjectPtr &src = p.first;
+ const ObjectPtr &replaced = p.second;
// Create a `nodes_to_change` list containing the Nodes that refer to the `replaced` Node
// that is targeted for elimination.
- DFSVisit(g->outputs, [replaced, &nodes_to_change](const NodePtr &n) {
+ DFSVisit(g->outputs, [replaced, &nodes_to_change](const ObjectPtr &n) {
for (const auto &dep : n->control_deps) {
if (dep == replaced) {
nodes_to_change.push_back(n);
@@ -189,7 +189,7 @@ void EliminateCommonNodes(Graph* g,
if (kv == unique_outputs.end()) {
unique_outputs.emplace(g->outputs[i], 0);
} else {
- NodePtr copy_node = Node::Create();
+ ObjectPtr copy_node = Node::Create();
std::ostringstream os;
os << kv->first.node->attrs.name << "_" << kv->second << "_copy";
kv->second++;
@@ -207,7 +207,7 @@ void EliminateCommonNodes(Graph* g,
* \brief Simplify a graph by iteratively eliminating Nodes with identical inputs and function.
*/
nnvm::Graph EliminateCommonExpr(nnvm::Graph&& g) {
- using nnvm::NodePtr;
+ using nnvm::ObjectPtr;
bool keep_running = true;
while (keep_running) {
const auto& common_nodes = GetCommonNodes(g);
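One point worth spelling out from the driver loop above: the pass must iterate to a fixpoint, because merging two identical nodes can make their consumers newly identical. An illustrative (not verbatim) example:

```c++
// Illustrative only. Suppose the graph computes:
//   a1 = exp(x);   a2 = exp(x);    // identical -> merged on the first sweep
//   b1 = a1 + y;   b2 = a2 + y;    // become identical only after a1/a2 merge
// GetCommonNodes() therefore runs again after each EliminateCommonNodes()
// call, and the loop stops only when it returns an empty list.
```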
4 changes: 2 additions & 2 deletions src/executor/exec_pass.h
@@ -41,7 +41,7 @@ namespace mxnet {
namespace exec {

template <typename Attr>
- using FAccessSubgraphAttr = std::function<std::tuple<const nnvm::NodePtr,
+ using FAccessSubgraphAttr = std::function<std::tuple<const nnvm::ObjectPtr,
std::vector<Attr>,
std::vector<Attr>>
(const NodeAttrs& attrs)>;
@@ -52,7 +52,7 @@ using FAccessSubgraphStorageType = FAccessSubgraphAttr<int>;

template <typename Attr>
using FProvideSubgraphAttr = std::function<void (const NodeAttrs& attrs,
- const std::vector<nnvm::NodePtr> &nodes,
+ const std::vector<nnvm::ObjectPtr> &nodes,
const std::vector<std::vector<Attr>> &in_attrs,
const std::vector<std::vector<Attr>> &out_attrs)>;
using FProvideSubgraphShape = FProvideSubgraphAttr<mxnet::TShape>;
16 changes: 8 additions & 8 deletions src/executor/graph_executor.cc
@@ -216,7 +216,7 @@ const std::unordered_map<std::string, NDArray>& GraphExecutor::aux_state_map() const

static nnvm::NodeEntry AttrHint(nnvm::NodeEntry src, nnvm::NodeEntry like) {
static const Op* id_like = Op::Get("_identity_with_attr_like_rhs");
- nnvm::NodePtr n = nnvm::Node::Create();
+ nnvm::ObjectPtr n = nnvm::Node::Create();
n->attrs.op = id_like;
n->attrs.name = src.node->attrs.name + "_id";
n->inputs = {src, like};
@@ -233,7 +233,7 @@ nnvm::NodeEntry AggregateGradient(std::vector<nnvm::NodeEntry>&& v) {
static const Op* zeros_like_op = Op::Get("zeros_like");

if (v.empty()) {
- nnvm::NodePtr ng = nnvm::Node::Create();
+ nnvm::ObjectPtr ng = nnvm::Node::Create();
ng->attrs.op = Op::Get("_zeros_without_dtype");
ng->attrs.name = "zeros_without_dtype";
ng->attrs.op->attr_parser(&(ng->attrs));
@@ -253,7 +253,7 @@ nnvm::NodeEntry AggregateGradient(std::vector<nnvm::NodeEntry>&& v) {
return std::move(v[0]);
} else {
if (v.size() < inplace_sum_cap) {
- nnvm::NodePtr sum_node = nnvm::Node::Create();
+ nnvm::ObjectPtr sum_node = nnvm::Node::Create();
sum_node->attrs.op = ewise_sum_op;
sum_node->attrs.name = "sum_grad";
sum_node->attrs.dict["num_args"] = std::to_string(v.size());
@@ -285,15 +285,15 @@ nnvm::NodeEntry AggregateGradient(std::vector<nnvm::NodeEntry>&& v) {

std::ostringstream os;
os << "sum_grad_" << i;
- nnvm::NodePtr x = nnvm::Node::Create();
+ nnvm::ObjectPtr x = nnvm::Node::Create();
x->attrs.op = ewise_plus_op;
x->attrs.name = os.str();
x->inputs = {ret, v[i]};
ret = nnvm::NodeEntry(std::move(x), 0, 0);
}
// identity node is used to avoid exposure of dummy plus node
// when its output get assigned to another space.
- nnvm::NodePtr id_node = nnvm::Node::Create();
+ nnvm::ObjectPtr id_node = nnvm::Node::Create();
id_node->attrs.op = identity_op;
id_node->attrs.name = "sum_grad_final";
id_node->inputs = {ret};
@@ -324,7 +324,7 @@ inline ValueType get_node_attr(
*/
nnvm::Graph GraphExecutor::InitFullGraph(nnvm::Symbol symbol,
const std::vector<OpReqType>& grad_req_types) {
- using nnvm::NodePtr;
+ using nnvm::ObjectPtr;
using nnvm::NodeEntry;
// initial information
num_forward_outputs_ = symbol.outputs.size();
@@ -346,7 +346,7 @@ nnvm::Graph GraphExecutor::InitFullGraph(nnvm::Symbol symbol,
head_grad_entry_.emplace_back(AttrHint(ngrad, g.outputs[i]));
head_grad_map_[ngrad.node.get()] = i;
}
- std::vector<NodePtr> args = symbol.ListInputs(nnvm::Symbol::kReadOnlyArgs);
+ std::vector<ObjectPtr> args = symbol.ListInputs(nnvm::Symbol::kReadOnlyArgs);
std::vector<NodeEntry> xs;
for (size_t i = 0; i < grad_req_types.size(); ++i) {
if (grad_req_types[i] != kNullOp) {
@@ -1421,7 +1421,7 @@ void GraphExecutor::ExecuteMonOutputCallback(size_t nid) {
const auto& node = idx[nid].source;
for (size_t i = 0; i < opnode.exec->out_array.size(); ++i) {
NDArray *cpy = new NDArray(opnode.exec->out_array[i]);
- nnvm::NodePtr node_ptr = std::make_shared<nnvm::Node>(*node);
+ nnvm::ObjectPtr node_ptr = std::make_shared<nnvm::Node>(*node);
std::string name = GetOutputName({node_ptr, static_cast<uint32_t >(i), 0});
this->monitor_callback_(name.c_str(), reinterpret_cast<void*>(cpy));
}
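Since only the renamed lines of `AggregateGradient` are shown, here is a hedged outline of the policy those lines sit in, reconstructed from the visible context:

```c++
// Hedged outline of AggregateGradient's cases (not the verbatim function):
//   v.empty()                  -> emit a "_zeros_without_dtype" node
//   v.size() == 1              -> return the lone gradient entry as-is
//   v.size() < inplace_sum_cap -> one fused "ElementWiseSum" node ("sum_grad")
//   otherwise                  -> a chain of elementwise adds ("sum_grad_i"),
//                                 finished by an identity node ("sum_grad_final")
//                                 so the dummy plus node is never exposed when
//                                 its output is assigned to another space.
```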
8 changes: 4 additions & 4 deletions src/executor/infer_graph_attr_pass.cc
@@ -74,7 +74,7 @@ inline void GetAttrFromForwardNode(const uint32_t nid,
// gradient function, used to get node correspondence.
static auto& fgrad =
Op::GetAttr<nnvm::FGradient>("FGradient");
- nnvm::NodePtr fwd_ptr = inode.source->control_deps[0];
+ nnvm::ObjectPtr fwd_ptr = inode.source->control_deps[0];
const nnvm::IndexedGraph::Node& fnode = idx[inode.control_deps[0]];
// use gradient function to find out the correspondence.
std::vector<nnvm::NodeEntry> ograd(fwd_ptr->num_outputs());
@@ -140,7 +140,7 @@ void GetAttrFromFusedNode(uint32_t nid,
// gradient function, used to get node correspondence.
static auto& fgrad =
Op::GetAttr<nnvm::FGradient>("FGradient");
- nnvm::NodePtr fused_fwd_ptr = inode.source->control_deps[0];
+ nnvm::ObjectPtr fused_fwd_ptr = inode.source->control_deps[0];
static auto& finfer_fused_shape =
Op::GetAttr<FAccessSubgraphType>(infer_fusion_name);
auto finfer = finfer_fused_shape.get(fused_fwd_ptr->op(), nullptr);
@@ -394,7 +394,7 @@ nnvm::Graph InferAttr(nnvm::Graph &&ret,
<< "Backward inference for node attributes is not available";
CHECK_GE(inode.source->control_deps.size(), 1U)
<< "BackwardOp need to have control_deps to its forward op";
- nnvm::NodePtr fwd_ptr = inode.source->control_deps[0];
+ nnvm::ObjectPtr fwd_ptr = inode.source->control_deps[0];
CHECK(fwd_ptr->op() != nullptr) << "Forward op cannot be a variable";

static auto& is_fusion_helper = Op::GetAttr<exec::TIsFusionHelper>("TIsFusionHelper");
@@ -690,7 +690,7 @@ nnvm::Graph InferShapeAttr(nnvm::Graph &&ret,
<< "Backward inference for node attributes is not available";
CHECK_GE(inode.source->control_deps.size(), 1U)
<< "BackwardOp need to have control_deps to its forward op";
- nnvm::NodePtr fwd_ptr = inode.source->control_deps[0];
+ nnvm::ObjectPtr fwd_ptr = inode.source->control_deps[0];
CHECK(fwd_ptr->op() != nullptr) << "Forward op cannot be a variable";

static auto& is_fusion_helper = Op::GetAttr<exec::TIsFusionHelper>("TIsFusionHelper");
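The recurring `control_deps[0]` lines above rely on a convention worth noting: a backward node keeps its forward node as its first control dependency, and the pass recovers the input/output correspondence by re-running the op's `FGradient` on placeholder gradients. A condensed sketch of that lookup, using the names from this diff (`inode` is the indexed-graph node being visited):

```c++
// Sketch, condensed from GetAttrFromForwardNode in this file.
static auto& fgrad = nnvm::Op::GetAttr<nnvm::FGradient>("FGradient");
nnvm::ObjectPtr fwd_ptr = inode.source->control_deps[0];  // the forward node
CHECK(fwd_ptr->op() != nullptr) << "Forward op cannot be a variable";
// Placeholder output gradients; only the graph structure of the result
// matters, so the entries can stay empty.
std::vector<nnvm::NodeEntry> ograd(fwd_ptr->num_outputs());
// The returned entries reveal which backward inputs line up with which
// forward inputs/outputs, letting attributes flow between the two nodes.
auto igrad_entries = fgrad[fwd_ptr->op()](fwd_ptr, ograd);
```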