Support pir #1389

Merged: 27 commits (Sep 19, 2024)
Commits
1ff4d27
[OSPP][PIR] find op in pir mappers first
qzylalala Sep 7, 2024
01e6795
[OSPP][PIR] support full_int_array op for PIR
qzylalala Sep 7, 2024
8541e40
[OSPP][PIR] PIR Mapper Register
qzylalala Sep 7, 2024
ac1e7a1
[OSPP][PIR] Fix some bugs
qzylalala Sep 8, 2024
57d31cb
[OSPP][PIR] Support pd.full op
qzylalala Sep 8, 2024
f9f253e
[OSPP][PIR] Fix some bugs
qzylalala Sep 8, 2024
1758260
[OSPP][PIR] map relu to relu6
qzylalala Sep 9, 2024
f9f48b1
[OSPP][PIR] re-write mapper interface
qzylalala Sep 10, 2024
7e274b0
[OSPP][PIR] GetInput for PIR
qzylalala Sep 10, 2024
a449ef8
[OSPP][PIR] flatten, scale
qzylalala Sep 10, 2024
e3e89d7
[OSPP][PIR] fix some bugs
qzylalala Sep 10, 2024
21be4ac
[OSPP][PIR] support some operations
qzylalala Sep 11, 2024
8b00bd6
[PIR] add getOpInput && getOpOutput
0x3878f Sep 11, 2024
101c972
[PIR] merge 1377 && add getOpInput, getOpOutput
0x3878f Sep 11, 2024
d3f739b
fix
risemeup1 Sep 13, 2024
637ba01
[PIR] Correctly obtain the name of the input for operator B from the …
0x3878f Sep 13, 2024
ffb4991
[PIR] map input name to idx && modify GetInput && GetOutput in mapper.h
0x3878f Sep 13, 2024
bab4181
fix: call SetOpInputOutputIndex func in GetMinOpsetVersion for Pool2d…
0x3878f Sep 14, 2024
686a2d8
support get input TensorInfo.name from builtin.parameter op
0x3878f Sep 14, 2024
bf6ab8a
Connect all inputs and outputs in series
0x3878f Sep 14, 2024
7eac514
support PaddlePirParser::GetOpAttrVar && modify pool2d mapper && not …
0x3878f Sep 15, 2024
697ae6f
OpHasInput && OpHasOutput
0x3878f Sep 15, 2024
e67d3d3
Fix: incorrect output shape cause by broadcasting in add op
0x3878f Sep 15, 2024
49973a7
Fix: incorrect value in full op
0x3878f Sep 16, 2024
989d9dc
fix:AdaptivePool kernel size, strides computing
0x3878f Sep 18, 2024
3604164
Commented on the test in file test_auto_scan_elementwise_ops.py
0x3878f Sep 19, 2024
86ff94f
delete useless interface in pir parser
0x3878f Sep 19, 2024
11 changes: 11 additions & 0 deletions paddle2onnx/mapper/activation/relu6.cc
@@ -16,8 +16,19 @@

namespace paddle2onnx {
REGISTER_MAPPER(relu6, Relu6Mapper)
REGISTER_PIR_MAPPER(relu6, Relu6Mapper)

void Relu6Mapper::SetOpInputOutputIndex() {
input_idx_ = {
{"X", 0},
};
output_idx_ = {
{"Out", 0},
};
}

void Relu6Mapper::Opset7() {
SetOpInputOutputIndex();
auto input_info = GetInput("X");
auto output_info = GetOutput("Out");
float min = 0.0;
4 changes: 4 additions & 0 deletions paddle2onnx/mapper/activation/relu6.h
@@ -27,7 +27,11 @@ class Relu6Mapper : public Mapper {
Relu6Mapper(const PaddleParser& p, OnnxHelper* helper, int64_t block_id,
int64_t op_id)
: Mapper(p, helper, block_id, op_id) {}

Relu6Mapper(const PaddlePirParser& p, OnnxHelper* helper, int64_t op_id)
: Mapper(p, helper, op_id) { in_pir_mode = true; }

void Opset7() override;
void SetOpInputOutputIndex() override;
};
}
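
Taken together, the relu6 changes show the pattern each operator follows to opt into PIR export: register the mapper a second time with REGISTER_PIR_MAPPER, add a PaddlePirParser constructor that sets in_pir_mode, and implement SetOpInputOutputIndex so that GetInput("X") and GetOutput("Out") can resolve named slots to positional operand and result indices. Below is a minimal standalone sketch of that name-to-index lookup; ToyMapper, TensorInfo, and their fields are simplified stand-ins, not the actual paddle2onnx classes.

// Minimal standalone sketch (not the real paddle2onnx classes): models how a
// SetOpInputOutputIndex-style table lets a mapper resolve named slots such as
// "X" / "Out" to positional PIR operand/result indices.
#include <iostream>
#include <string>
#include <unordered_map>
#include <vector>

struct TensorInfo { std::string name; };

struct ToyMapper {
  std::unordered_map<std::string, int> input_idx_;
  std::unordered_map<std::string, int> output_idx_;
  std::vector<TensorInfo> operands;  // stands in for the PIR op's inputs
  std::vector<TensorInfo> results;   // stands in for the PIR op's outputs

  void SetOpInputOutputIndex() {
    input_idx_ = {{"X", 0}};
    output_idx_ = {{"Out", 0}};
  }
  TensorInfo GetInput(const std::string& slot) const {
    return operands.at(input_idx_.at(slot));
  }
  TensorInfo GetOutput(const std::string& slot) const {
    return results.at(output_idx_.at(slot));
  }
};

int main() {
  ToyMapper m;
  m.operands = {{"relu6_in"}};
  m.results = {{"relu6_out"}};
  m.SetOpInputOutputIndex();
  std::cout << m.GetInput("X").name << " -> " << m.GetOutput("Out").name << std::endl;
  return 0;
}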
80 changes: 51 additions & 29 deletions paddle2onnx/mapper/exporter.cc
@@ -28,11 +28,6 @@
#include "paddle2onnx/optimizer/fuse_paddle_conv_bias.h"
#include "paddle2onnx/optimizer/fuse_unsqueeze_conv2d_squeeze.h"

std::unordered_map<std::string, std::string> op_name_mappings = {
{"matmul", "matmul_v2"},
{"batch_norm_", "batch_norm"},
{"flatten", "flatten_contiguous_range"},
{"add", "elementwise_add"}};

namespace paddle2onnx {
MapperHelper *MapperHelper::helper = nullptr;
@@ -46,27 +41,20 @@ bool ModelExporter::IsOpsRegistered(const PaddlePirParser &pir_parser,
if (op->name() == "pd_op.data" || op->name() == "pd_op.fetch") {
continue;
}
std::string op_name = op->name();
std::string prefix = "pd_op.";

size_t prefix_pos = op_name.find(prefix);
if (prefix_pos != std::string::npos) {
op_name = op_name.substr(prefix_pos + prefix.size());
}
auto it = op_name_mappings.find(op_name);
if (it != op_name_mappings.end()) {
op_name = it->second;
}
std::string op_name = convert_pir_op_name(op->name());
if (!MapperHelper::Get()->IsRegistered(op_name)) {
unsupported_ops.insert(op_name);
}
}
auto logger = P2OLogger();
logger << "There are some ops not supported yet, including ";
for (auto &item : unsupported_ops) {
logger << item << ",";
// TODO(wangmingkai02): check whether the op is an experimental op
if (unsupported_ops.size() != 0) {
auto logger = P2OLogger();
logger << "There are some ops not supported yet, including ";
for (auto &item : unsupported_ops) {
logger << item << ",";
}
logger << std::endl;
}
logger << std::endl;
return (unsupported_ops.size() == 0);
}

@@ -173,16 +161,34 @@ int32_t ModelExporter::GetMinOpsetVersion(const PaddlePirParser &pir_parser) {
std::set<std::string> verbose_log;
OnnxHelper helper;
for (auto i = 0; i < pir_parser.global_blocks_ops.size(); i++) {
if (pir_parser.global_blocks_ops[i]->name() == "pd_op.data" ||
pir_parser.global_blocks_ops[i]->name() == "pd_op.fetch") {
std::string op_name = pir_parser.global_blocks_ops[i]->name();
if (op_name == "pd_op.data" || op_name == "pd_op.fetch") {
continue;
}
int current_opset = 7;
P2OLogger() << "GetMinOpsetVersion : i " << std::to_string(i) << " , op : " << op_name << std::endl;
auto mapper = MapperHelper::Get()->CreateMapper(
pir_parser.global_blocks_ops[i]->name(), pir_parser, &helper, i);
convert_pir_op_name(op_name),
pir_parser, &helper, i);
current_opset = mapper->GetMinOpsetVersion(verbose_);
delete mapper;

// TODO: enabling this block still triggers bugs that are not solved yet
// if (current_opset > max_opset) {
// max_opset = current_opset;
// if (current_opset > opset_version_) {
// verbose_log.insert("Due to the operator: " +
// pir_parser.global_blocks_ops[i]->name() + ", " +
// "requires opset_version >= " +
// std::to_string(current_opset) + ".");
// }
// }
}

for (auto iter = verbose_log.begin(); iter != verbose_log.end(); ++iter) {
P2OLogger(verbose_) << *iter << std::endl;
}
return max_opset;
}
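
The commented-out block above is the intended aggregation: the exported model needs an opset version at least as high as the minimum required by every op in the graph. The following is a standalone sketch of that aggregation, with illustrative per-op values rather than figures taken from this PR.

// Standalone sketch of the opset aggregation GetMinOpsetVersion is meant to
// perform once the commented-out block is re-enabled. The per-op minimums
// below are illustrative assumptions, not values taken from the PR.
#include <algorithm>
#include <iostream>
#include <map>
#include <string>

int main() {
  std::map<std::string, int> min_opset_per_op = {
      {"relu6", 7}, {"pool2d", 9}, {"flatten_contiguous_range", 11}};
  int max_opset = 7;  // same starting value as in GetMinOpsetVersion
  for (const auto& kv : min_opset_per_op) {
    max_opset = std::max(max_opset, kv.second);  // keep the largest requirement
  }
  std::cout << "required opset >= " << max_opset << std::endl;  // prints 11
  return 0;
}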

void ModelExporter::SetOpsetVersion(const PaddlePirParser &pir_parser,
@@ -220,6 +226,7 @@ void ModelExporter::SetOpsetVersion(const PaddlePirParser &pir_parser,
P2OLogger(verbose_) << "Use opset_version = " << opset_version_
<< " for ONNX export." << std::endl;
}

void ModelExporter::SetOpsetVersion(const PaddleParser &parser,
bool auto_upgrade_opset) {
// Set the Opset Version of the ONNX model.
@@ -318,7 +325,7 @@ void ModelExporter::ExportInputOutputs(
}
}

void ExportInputOutputs(
void ModelExporter::ExportInputOutputs(
const PaddlePirParser &pir_parser,
std::vector<std::shared_ptr<ONNX_NAMESPACE::ValueInfoProto>> &inputs,
std::vector<std::shared_ptr<ONNX_NAMESPACE::ValueInfoProto>>
@@ -347,7 +354,7 @@ void ModelExporter::ExportParameters(
}
}

void ExportParameters(
void ModelExporter::ExportParameters(
const PaddlePirParser &pir_parser,
std::vector<std::shared_ptr<ONNX_NAMESPACE::NodeProto>> &parameters) {
parameters.clear();
@@ -406,11 +413,23 @@ ONNX_NAMESPACE::GraphProto ModelExporter::ExportBlock(
auto num_ops = pir_parser.global_blocks_ops.size();
temp_helper.nodes.reserve(num_ops * 3);
temp_helper.Clear();
std::cout << "operator num: " << num_ops << std::endl;
for (auto i = 0; i < num_ops; ++i) {
auto op = pir_parser.global_blocks_ops[i];
if (op->name() == "data" || op->name() == "fetch") {
if (op->name() == "pd_op.data" || op->name() == "pd_op.fetch") {
continue;
}
if(op->name() == "pd_op.full_int_array") {
bool needExport = false;
for(auto it = op->result(0).use_begin(); it != op->result(0).use_end(); ++it) {
// if (!(it->owner()->isa<paddle::dialect::Pool2dOp>())) {
if (!(it->owner()->name() == "pd_op.pool2d")){
needExport = true;
break;
}
}
if(!needExport) continue;
}
ExportOp(pir_parser, &temp_helper, opset_version_, op, i, verbose_);
}
for (auto &item : parameters) {
@@ -572,7 +591,8 @@ void ModelExporter::ExportOp(const PaddlePirParser &pir_parser,
int64_t op_id,
bool verbose) {
auto mapper =
MapperHelper::Get()->CreateMapper(op->name(), pir_parser, helper, op_id);
MapperHelper::Get()->CreateMapper(convert_pir_op_name(op->name()),
pir_parser, helper, op_id);
mapper->deploy_backend = deploy_backend_;
mapper->Run();
delete mapper;
@@ -754,7 +774,6 @@ std::string ModelExporter::Run(const PaddlePirParser &pir_parser,
verbose_ = verbose;
deploy_backend_ = deploy_backend;
calibration_cache_ = calibration_cache;

// Clear name_counter; it is used to generate unique names for intermediate
// tensors while converting all the ops
MapperHelper::Get()->ClearNameCounter();
@@ -784,6 +803,9 @@ std::string ModelExporter::Run(const PaddlePirParser &pir_parser,
auto share_graph = ExportBlock(pir_parser, parameters, inputs, outputs);
*onnx_model_.mutable_graph() = share_graph;

if (enable_onnx_checker) {
ONNXChecker(onnx_model_, verbose);
}
std::string out;
if (!onnx_model_.SerializeToString(&out)) {
P2OLogger(verbose)
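One detail worth noting in ExportBlock: a pd_op.full_int_array op is skipped when every consumer of its result is a pd_op.pool2d, presumably because the pool2d mapper reads that value through GetOpAttrVar rather than through a separately exported constant node. Below is a standalone sketch of the same check, with the consumer list reduced to a plain vector of op names standing in for iterating op->result(0) uses.

// Standalone sketch of the full_int_array filtering in ExportBlock: the
// constant is exported only if at least one consumer is not pd_op.pool2d.
#include <iostream>
#include <string>
#include <vector>

bool NeedExportFullIntArray(const std::vector<std::string>& consumer_op_names) {
  for (const auto& name : consumer_op_names) {
    if (name != "pd_op.pool2d") {
      return true;  // some non-pool2d consumer still needs the constant node
    }
  }
  return false;  // only pool2d consumers: skip exporting this op
}

int main() {
  std::cout << NeedExportFullIntArray({"pd_op.pool2d"}) << std::endl;                   // 0
  std::cout << NeedExportFullIntArray({"pd_op.pool2d", "pd_op.reshape"}) << std::endl;  // 1
  return 0;
}
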
25 changes: 24 additions & 1 deletion paddle2onnx/mapper/exporter.h
@@ -33,6 +33,29 @@
#define PATH_SEP "/"
#endif

inline std::string convert_pir_op_name(const std::string pir_op_name) {
std::unordered_map<std::string, std::string> op_name_mappings = {
{"matmul", "matmul_v2"},
{"relu", "relu6"},
{"batch_norm_", "batch_norm"},
{"flatten", "flatten_contiguous_range"},
{"add", "elementwise_add"}};
std::string op_name = pir_op_name;
std::string prefix = "pd_op.";

size_t prefix_pos = op_name.find(prefix);
if (prefix_pos != std::string::npos) {
op_name = op_name.substr(prefix_pos + prefix.size());
}
auto it = op_name_mappings.find(op_name);
if (it != op_name_mappings.end()) {
op_name = it->second;
}

return op_name;
}


inline std::string GetFilenameFromPath(const std::string &path) {
auto pos = path.find_last_of(PATH_SEP);
if (pos == std::string::npos) {
@@ -44,7 +67,7 @@ inline std::string GetFilenameFromPath(const std::string &path) {
namespace paddle2onnx {
class ModelExporter {
public:
QuantizeModelProcessor quantize_model_processer;
QuantizeModelProcessor quantize_model_processer;

void SaveExternalData(ONNX_NAMESPACE::GraphProto *graph,
const std::string &external_file_path,
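
Since convert_pir_op_name only depends on standard headers, its behavior is easy to verify in isolation. Here is a minimal usage sketch; the function body is copied from the diff above, while main() and the sample op names are only illustrative.

// Self-contained usage sketch of the convert_pir_op_name helper added in
// exporter.h: strip the "pd_op." prefix, then apply the name-mapping table.
#include <iostream>
#include <string>
#include <unordered_map>

std::string convert_pir_op_name(const std::string pir_op_name) {
  std::unordered_map<std::string, std::string> op_name_mappings = {
      {"matmul", "matmul_v2"},
      {"relu", "relu6"},
      {"batch_norm_", "batch_norm"},
      {"flatten", "flatten_contiguous_range"},
      {"add", "elementwise_add"}};
  std::string op_name = pir_op_name;
  std::string prefix = "pd_op.";

  size_t prefix_pos = op_name.find(prefix);
  if (prefix_pos != std::string::npos) {
    op_name = op_name.substr(prefix_pos + prefix.size());
  }
  auto it = op_name_mappings.find(op_name);
  if (it != op_name_mappings.end()) {
    op_name = it->second;
  }
  return op_name;
}

int main() {
  std::cout << convert_pir_op_name("pd_op.matmul") << std::endl;  // matmul_v2
  std::cout << convert_pir_op_name("pd_op.relu") << std::endl;    // relu6
  std::cout << convert_pir_op_name("pd_op.relu6") << std::endl;   // relu6
  std::cout << convert_pir_op_name("pd_op.conv2d") << std::endl;  // conv2d
  return 0;
}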