From 372b657900a402a38067c70ab456c8e5ca8dfe6e Mon Sep 17 00:00:00 2001
From: jywu-msft <43355415+jywu-msft@users.noreply.github.com>
Date: Fri, 16 Aug 2019 17:44:22 -0700
Subject: [PATCH] update TRT EP CI's to use latest model.zip (#1637)

---
 onnxruntime/test/onnx/main.cc             | 16 +++++++++++++++-
 .../linux-gpu-tensorrt-ci-pipeline.yml    |  3 +--
 .../win-gpu-tensorrt-ci-pipeline.yml      |  5 +----
 3 files changed, 17 insertions(+), 7 deletions(-)

diff --git a/onnxruntime/test/onnx/main.cc b/onnxruntime/test/onnx/main.cc
index 59c5ba209445b..d6a0769a7b57f 100644
--- a/onnxruntime/test/onnx/main.cc
+++ b/onnxruntime/test/onnx/main.cc
@@ -433,7 +433,21 @@ int real_main(int argc, char* argv[], Ort::Env& env) {
 #endif
 #endif
 
-
+#ifdef USE_TENSORRT
+  broken_tests.insert({"fp16_shufflenet", "TRT EP bug"});
+  broken_tests.insert({"fp16_inception_v1", "TRT EP bug"});
+  broken_tests.insert({"fp16_tiny_yolov2", "TRT EP bug"});
+  broken_tests.insert({"tf_inception_v3", "TRT Engine couldn't be created"});
+  broken_tests.insert({"tf_mobilenet_v1_1.0_224", "TRT Engine couldn't be created"});
+  broken_tests.insert({"tf_mobilenet_v2_1.0_224", "TRT Engine couldn't be created"});
+  broken_tests.insert({"tf_mobilenet_v2_1.4_224", "TRT Engine couldn't be created"});
+  broken_tests.insert({"tf_resnet_v1_101", "TRT Engine couldn't be created"});
+  broken_tests.insert({"tf_resnet_v1_152", "TRT Engine couldn't be created"});
+  broken_tests.insert({"tf_resnet_v1_50", "TRT Engine couldn't be created"});
+  broken_tests.insert({"tf_resnet_v2_101", "TRT Engine couldn't be created"});
+  broken_tests.insert({"tf_resnet_v2_152", "TRT Engine couldn't be created"});
+  broken_tests.insert({"tf_resnet_v2_50", "TRT Engine couldn't be created"});
+#endif
 
 #ifdef USE_CUDA
   broken_tests.insert({"mxnet_arcface", "result mismatch"});
diff --git a/tools/ci_build/github/azure-pipelines/linux-gpu-tensorrt-ci-pipeline.yml b/tools/ci_build/github/azure-pipelines/linux-gpu-tensorrt-ci-pipeline.yml
index 8b5fe3a3cd9eb..23b5ca1e345ff 100644
--- a/tools/ci_build/github/azure-pipelines/linux-gpu-tensorrt-ci-pipeline.yml
+++ b/tools/ci_build/github/azure-pipelines/linux-gpu-tensorrt-ci-pipeline.yml
@@ -15,8 +15,7 @@ jobs:
     displayName: 'Download test data'
     inputs:
       scriptPath: '$(Build.SourcesDirectory)/tools/ci_build/github/download_test_data.py'
-      # There are some tests in 20190130.zip that TensorRT can't run. Instead here use 20181210 opset8 for TensorRT test.
-      arguments: --test_data_url https://onnxruntimetestdata.blob.core.windows.net/models/20181210.zip --build_dir $(Build.BinariesDirectory)
+      arguments: --test_data_url $(TestDataUrl) --build_dir $(Build.BinariesDirectory)
      pythonInterpreter: '/usr/bin/python3'
      workingDirectory: $(Build.BinariesDirectory)
 
diff --git a/tools/ci_build/github/azure-pipelines/win-gpu-tensorrt-ci-pipeline.yml b/tools/ci_build/github/azure-pipelines/win-gpu-tensorrt-ci-pipeline.yml
index c11704dbf1f82..88d70de0822fa 100644
--- a/tools/ci_build/github/azure-pipelines/win-gpu-tensorrt-ci-pipeline.yml
+++ b/tools/ci_build/github/azure-pipelines/win-gpu-tensorrt-ci-pipeline.yml
@@ -6,12 +6,9 @@ jobs:
     buildDirectory: '$(Build.BinariesDirectory)'
     OnnxRuntimeBuildDirectory: '$(Build.BinariesDirectory)'
     CUDA_VERSION: '10.0'
-    # There are some tests in 20190130.zip that TensorRT can't run. Instead here use 20181210 opset8 for TensorRT test.
-    TestDataUrl: https://onnxruntimetestdata.blob.core.windows.net/models/20181210.zip
-    TestDataChecksum: a966def7447f4ff04f5665bca235b3f3
 
  steps:
-    # - template: templates/set-test-data-variables-step.yml
+    - template: templates/set-test-data-variables-step.yml
    - template: templates/windows-build-tools-setup-steps.yml
      parameters:
        EnvSetupScript: 'setup_env_cuda.bat'
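
Note on the changes above: broken_tests in main.cc is the onnx_test_runner's exclusion list, pairing a test name with the reason it is skipped, so the new USE_TENSORRT block opts known-failing models out of the TensorRT CI runs while both pipelines switch from a hard-coded 20181210.zip URL to the shared $(TestDataUrl)/$(TestDataChecksum) variables set by templates/set-test-data-variables-step.yml. The sketch below illustrates, under assumptions, how a name-keyed exclusion list like this can gate test execution; the BrokenTest struct, ShouldSkip helper, and main driver here are hypothetical stand-ins, not the actual main.cc code.

// Illustrative sketch only (not the real onnx_test_runner code): shows how a
// name-keyed exclusion list such as broken_tests can decide whether a test runs.
#include <iostream>
#include <set>
#include <string>

// Hypothetical stand-in for a broken-test record: a test name plus the reason
// it is excluded. Ordering by name alone lets std::set::find match an entry
// when only the name is filled in.
struct BrokenTest {
  std::string test_name_;
  std::string reason_;
  bool operator<(const BrokenTest& other) const { return test_name_ < other.test_name_; }
};

// Returns true and fills 'reason' when 'test_name' appears in the exclusion list.
bool ShouldSkip(const std::set<BrokenTest>& broken_tests,
                const std::string& test_name, std::string* reason) {
  auto it = broken_tests.find(BrokenTest{test_name, ""});
  if (it == broken_tests.end()) return false;
  *reason = it->reason_;
  return true;
}

int main() {
  std::set<BrokenTest> broken_tests;
  // Entries mirroring two of the ones added in the patch; in main.cc they sit
  // behind #ifdef USE_TENSORRT so they only apply to TensorRT EP builds.
  broken_tests.insert({"fp16_shufflenet", "TRT EP bug"});
  broken_tests.insert({"tf_inception_v3", "TRT Engine couldn't be created"});

  std::string reason;
  for (const std::string& name : {"fp16_shufflenet", "squeezenet"}) {
    if (ShouldSkip(broken_tests, name, &reason)) {
      std::cout << "skipping " << name << ": " << reason << "\n";
    } else {
      std::cout << "running " << name << "\n";
    }
  }
  return 0;
}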