From 00078cda06eda8b86d0300e0998b13a736622555 Mon Sep 17 00:00:00 2001 From: Klein Hu Date: Thu, 25 Apr 2019 18:12:18 -0700 Subject: [PATCH] Rename to onnxruntime server --- BUILD.md | 4 +- cmake/CMakeLists.txt | 6 +- cmake/get_boost.cmake | 3 + cmake/onnxruntime_hosting.cmake | 120 ------------------ cmake/onnxruntime_server.cmake | 120 ++++++++++++++++++ cmake/onnxruntime_unittests.cmake | 61 ++++----- ..._Usage.md => ONNX_Runtime_Server_Usage.md} | 30 ++--- onnxruntime/{hosting => server}/converter.cc | 4 +- onnxruntime/{hosting => server}/converter.h | 4 +- .../{hosting => server}/environment.cc | 20 +-- onnxruntime/{hosting => server}/environment.h | 12 +- onnxruntime/{hosting => server}/executor.cc | 10 +- onnxruntime/{hosting => server}/executor.h | 14 +- .../{hosting => server}/http/core/context.h | 4 +- .../http/core/http_server.cc | 4 +- .../http/core/http_server.h | 4 +- .../{hosting => server}/http/core/listener.cc | 4 +- .../{hosting => server}/http/core/listener.h | 4 +- .../{hosting => server}/http/core/routes.cc | 4 +- .../{hosting => server}/http/core/routes.h | 4 +- .../{hosting => server}/http/core/session.cc | 4 +- .../{hosting => server}/http/core/session.h | 4 +- .../{hosting => server}/http/core/util.cc | 4 +- .../{hosting => server}/http/core/util.h | 4 +- .../{hosting => server}/http/json_handling.cc | 8 +- .../{hosting => server}/http/json_handling.h | 8 +- .../http/predict_request_handler.cc | 6 +- .../http/predict_request_handler.h | 6 +- onnxruntime/{hosting => server}/http/util.cc | 4 +- onnxruntime/{hosting => server}/http/util.h | 6 +- onnxruntime/{hosting => server}/log_sink.h | 4 +- onnxruntime/{hosting => server}/main.cc | 16 +-- .../protobuf/onnx-ml.proto | 0 .../protobuf/predict.proto | 2 +- .../server_configuration.h | 6 +- onnxruntime/{hosting => server}/util.cc | 4 +- onnxruntime/{hosting => server}/util.h | 4 +- .../test/hosting/integration_tests/README.MD | 43 ------- .../test/server/integration_tests/README.MD | 43 +++++++ 
.../integration_tests/function_tests.py | 54 ++++---- .../integration_tests/model_zoo_data_prep.py | 0 .../integration_tests/model_zoo_tests.py | 12 +- .../integration_tests/test_main.py | 2 +- .../integration_tests/test_util.py | 24 ++-- .../unit_tests/converter_tests.cc | 90 ++++++------- .../unit_tests/http_routes_tests.cc | 6 +- .../unit_tests/json_handling_tests.cc | 28 ++-- .../unit_tests/server_configuration_test.cc | 18 +-- .../unit_tests/test_main.cc | 0 .../unit_tests/util_tests.cc | 8 +- onnxruntime/test/testdata/hosting/mnist.onnx | Bin 26454 -> 0 bytes .../mnist_test_data_set_0_input.json | 0 .../mnist_test_data_set_0_input.pb | Bin .../mnist_test_data_set_0_output.json | 0 .../mnist_test_data_set_0_output.pb | 0 .../testdata/{hosting => server}/request_0.pb | Bin .../{hosting => server}/response_0.pb | 0 tools/ci_build/build.py | 24 ++-- .../vienna-linux-ci-pipeline.yml | 4 +- ...sting_run_build.sh => server_run_build.sh} | 0 ...ckerbuild.sh => server_run_dockerbuild.sh} | 4 +- 61 files changed, 446 insertions(+), 440 deletions(-) delete mode 100644 cmake/onnxruntime_hosting.cmake create mode 100644 cmake/onnxruntime_server.cmake rename docs/{Hosting_Application_Usage.md => ONNX_Runtime_Server_Usage.md} (88%) rename onnxruntime/{hosting => server}/converter.cc (99%) rename onnxruntime/{hosting => server}/converter.h (96%) rename onnxruntime/{hosting => server}/environment.cc (74%) rename onnxruntime/{hosting => server}/environment.h (82%) rename onnxruntime/{hosting => server}/executor.cc (94%) rename onnxruntime/{hosting => server}/executor.h (75%) rename onnxruntime/{hosting => server}/http/core/context.h (96%) rename onnxruntime/{hosting => server}/http/core/http_server.cc (97%) rename onnxruntime/{hosting => server}/http/core/http_server.h (96%) rename onnxruntime/{hosting => server}/http/core/listener.cc (97%) rename onnxruntime/{hosting => server}/http/core/listener.h (95%) rename onnxruntime/{hosting => server}/http/core/routes.cc (97%) rename 
onnxruntime/{hosting => server}/http/core/routes.h (96%) rename onnxruntime/{hosting => server}/http/core/session.cc (99%) rename onnxruntime/{hosting => server}/http/core/session.h (98%) rename onnxruntime/{hosting => server}/http/core/util.cc (90%) rename onnxruntime/{hosting => server}/http/core/util.h (90%) rename onnxruntime/{hosting => server}/http/json_handling.cc (88%) rename onnxruntime/{hosting => server}/http/json_handling.h (85%) rename onnxruntime/{hosting => server}/http/predict_request_handler.cc (98%) rename onnxruntime/{hosting => server}/http/predict_request_handler.h (84%) rename onnxruntime/{hosting => server}/http/util.cc (98%) rename onnxruntime/{hosting => server}/http/util.h (92%) rename onnxruntime/{hosting => server}/log_sink.h (90%) rename onnxruntime/{hosting => server}/main.cc (84%) rename onnxruntime/{hosting => server}/protobuf/onnx-ml.proto (100%) rename onnxruntime/{hosting => server}/protobuf/predict.proto (96%) rename onnxruntime/{hosting => server}/server_configuration.h (97%) rename onnxruntime/{hosting => server}/util.cc (97%) rename onnxruntime/{hosting => server}/util.h (90%) delete mode 100644 onnxruntime/test/hosting/integration_tests/README.MD create mode 100644 onnxruntime/test/server/integration_tests/README.MD rename onnxruntime/test/{hosting => server}/integration_tests/function_tests.py (88%) rename onnxruntime/test/{hosting => server}/integration_tests/model_zoo_data_prep.py (100%) rename onnxruntime/test/{hosting => server}/integration_tests/model_zoo_tests.py (88%) rename onnxruntime/test/{hosting => server}/integration_tests/test_main.py (94%) rename onnxruntime/test/{hosting => server}/integration_tests/test_util.py (87%) rename onnxruntime/test/{hosting => server}/unit_tests/converter_tests.cc (88%) rename onnxruntime/test/{hosting => server}/unit_tests/http_routes_tests.cc (98%) rename onnxruntime/test/{hosting => server}/unit_tests/json_handling_tests.cc (93%) rename onnxruntime/test/{hosting => 
server}/unit_tests/server_configuration_test.cc (88%) rename onnxruntime/test/{hosting => server}/unit_tests/test_main.cc (100%) rename onnxruntime/test/{hosting => server}/unit_tests/util_tests.cc (97%) delete mode 100644 onnxruntime/test/testdata/hosting/mnist.onnx rename onnxruntime/test/testdata/{hosting => server}/mnist_test_data_set_0_input.json (100%) rename onnxruntime/test/testdata/{hosting => server}/mnist_test_data_set_0_input.pb (100%) rename onnxruntime/test/testdata/{hosting => server}/mnist_test_data_set_0_output.json (100%) rename onnxruntime/test/testdata/{hosting => server}/mnist_test_data_set_0_output.pb (100%) rename onnxruntime/test/testdata/{hosting => server}/request_0.pb (100%) rename onnxruntime/test/testdata/{hosting => server}/response_0.pb (100%) rename tools/ci_build/github/linux/{hosting_run_build.sh => server_run_build.sh} (100%) rename tools/ci_build/github/linux/{hosting_run_dockerbuild.sh => server_run_dockerbuild.sh} (93%) diff --git a/BUILD.md b/BUILD.md index db5ca3037f86c..98c1684de5b21 100644 --- a/BUILD.md +++ b/BUILD.md @@ -53,9 +53,9 @@ The complete list of build options can be found by running `./build.sh (or ./bui 1. For Windows, just add --x86 argument when launching build.bat 2. For Linux, it must be built out of a x86 os, --x86 argument also needs be specified to build.sh -## Build Hosting Application on Linux +## Build ONNX Runtime Server on Linux -1. In the ONNX Runtime root folder, run `./build.sh --config RelWithDebInfo --build_hosting --use_openmp --parallel` +1. 
In the ONNX Runtime root folder, run `./build.sh --config RelWithDebInfo --build_server --use_openmp --parallel` ## Build/Test Flavors for CI diff --git a/cmake/CMakeLists.txt b/cmake/CMakeLists.txt index c717e86b75136..0f5b3d85881ea 100644 --- a/cmake/CMakeLists.txt +++ b/cmake/CMakeLists.txt @@ -70,7 +70,7 @@ option(onnxruntime_USE_BRAINSLICE "Build with BrainSlice" OFF) option(onnxruntime_USE_TENSORRT "Build with TensorRT support" OFF) option(onnxruntime_ENABLE_LTO "Enable link time optimization, which is not stable on older GCCs" OFF) option(onnxruntime_CROSS_COMPILING "Cross compiling onnx runtime" OFF) -option(onnxruntime_BUILD_HOSTING "Build ONNX hosting service" OFF) +option(onnxruntime_BUILD_SERVER "Build ONNX Runtime Server" OFF) option(onnxruntime_USE_FULL_PROTOBUF "Use full protobuf" OFF) option(onnxruntime_DISABLE_CONTRIB_OPS "Disable contrib ops" OFF) option(onnxruntime_USE_EIGEN_THREADPOOL "Use eigen threadpool. Otherwise OpenMP or a homemade one will be used" OFF) @@ -608,8 +608,8 @@ if (onnxruntime_BUILD_SHARED_LIB) include(onnxruntime.cmake) endif() -if (onnxruntime_BUILD_HOSTING) - include(onnxruntime_hosting.cmake) +if (onnxruntime_BUILD_SERVER) + include(onnxruntime_server.cmake) endif() # some of the tests rely on the shared libs to be diff --git a/cmake/get_boost.cmake b/cmake/get_boost.cmake index 5bcefa3479fd4..265d088698571 100644 --- a/cmake/get_boost.cmake +++ b/cmake/get_boost.cmake @@ -1,3 +1,6 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. + set(BOOST_REQUESTED_VERSION 1.69.0 CACHE STRING "") set(BOOST_SHA1 8f32d4617390d1c2d16f26a27ab60d97807b35440d45891fa340fc2648b04406 CACHE STRING "") set(BOOST_USE_STATIC_LIBS true CACHE BOOL "") diff --git a/cmake/onnxruntime_hosting.cmake b/cmake/onnxruntime_hosting.cmake deleted file mode 100644 index 691703ac58a45..0000000000000 --- a/cmake/onnxruntime_hosting.cmake +++ /dev/null @@ -1,120 +0,0 @@ -# Copyright (c) Microsoft Corporation. 
All rights reserved. -# Licensed under the MIT License. - -set(HOSTING_APP_NAME "onnxruntime_hosting") - -# Generate .h and .cc files from protobuf file -add_library(hosting_proto - ${ONNXRUNTIME_ROOT}/hosting/protobuf/predict.proto) -target_include_directories(hosting_proto PUBLIC $ "${CMAKE_CURRENT_BINARY_DIR}/.." ${CMAKE_CURRENT_BINARY_DIR}/onnx) -target_compile_definitions(hosting_proto PUBLIC $) -onnxruntime_protobuf_generate(APPEND_PATH IMPORT_DIRS ${REPO_ROOT}/cmake/external/protobuf/src ${ONNXRUNTIME_ROOT}/hosting/protobuf ${ONNXRUNTIME_ROOT}/core/protobuf TARGET hosting_proto) -add_dependencies(hosting_proto onnx_proto ${onnxruntime_EXTERNAL_DEPENDENCIES}) -if(NOT WIN32) - if(HAS_UNUSED_PARAMETER) - set_source_files_properties(${CMAKE_CURRENT_BINARY_DIR}/model_metadata.pb.cc PROPERTIES COMPILE_FLAGS -Wno-unused-parameter) - set_source_files_properties(${CMAKE_CURRENT_BINARY_DIR}/model_status.pb.cc PROPERTIES COMPILE_FLAGS -Wno-unused-parameter) - set_source_files_properties(${CMAKE_CURRENT_BINARY_DIR}/predict.pb.cc PROPERTIES COMPILE_FLAGS -Wno-unused-parameter) - endif() -endif() - -# Setup dependencies -include(get_boost.cmake) -set(re2_src ${REPO_ROOT}/cmake/external/re2) - -# Setup source code -set(onnxruntime_hosting_lib_srcs - "${ONNXRUNTIME_ROOT}/hosting/http/json_handling.cc" - "${ONNXRUNTIME_ROOT}/hosting/http/predict_request_handler.cc" - "${ONNXRUNTIME_ROOT}/hosting/http/util.cc" - "${ONNXRUNTIME_ROOT}/hosting/environment.cc" - "${ONNXRUNTIME_ROOT}/hosting/executor.cc" - "${ONNXRUNTIME_ROOT}/hosting/converter.cc" - "${ONNXRUNTIME_ROOT}/hosting/util.cc" - ) -if(NOT WIN32) - if(HAS_UNUSED_PARAMETER) - set_source_files_properties(${ONNXRUNTIME_ROOT}/hosting/http/json_handling.cc PROPERTIES COMPILE_FLAGS -Wno-unused-parameter) - set_source_files_properties(${ONNXRUNTIME_ROOT}/hosting/http/predict_request_handler.cc PROPERTIES COMPILE_FLAGS -Wno-unused-parameter) - set_source_files_properties(${ONNXRUNTIME_ROOT}/hosting/executor.cc PROPERTIES 
COMPILE_FLAGS -Wno-unused-parameter) - set_source_files_properties(${ONNXRUNTIME_ROOT}/hosting/converter.cc PROPERTIES COMPILE_FLAGS -Wno-unused-parameter) - set_source_files_properties(${ONNXRUNTIME_ROOT}/hosting/util.cc PROPERTIES COMPILE_FLAGS -Wno-unused-parameter) - endif() -endif() - -file(GLOB_RECURSE onnxruntime_hosting_http_core_lib_srcs - "${ONNXRUNTIME_ROOT}/hosting/http/core/*.cc" - ) - -file(GLOB_RECURSE onnxruntime_hosting_srcs - "${ONNXRUNTIME_ROOT}/hosting/main.cc" -) - -# HTTP core library -add_library(onnxruntime_hosting_http_core_lib STATIC - ${onnxruntime_hosting_http_core_lib_srcs}) -target_include_directories(onnxruntime_hosting_http_core_lib - PUBLIC - ${ONNXRUNTIME_ROOT}/hosting/http/core - ${Boost_INCLUDE_DIR} - ${re2_src} -) -add_dependencies(onnxruntime_hosting_http_core_lib Boost) -target_link_libraries(onnxruntime_hosting_http_core_lib PRIVATE - ${Boost_LIBRARIES} -) - -# Hosting library -add_library(onnxruntime_hosting_lib ${onnxruntime_hosting_lib_srcs}) -onnxruntime_add_include_to_target(onnxruntime_hosting_lib gsl onnx_proto hosting_proto) -target_include_directories(onnxruntime_hosting_lib PRIVATE - ${ONNXRUNTIME_ROOT} - ${CMAKE_CURRENT_BINARY_DIR}/onnx - ${ONNXRUNTIME_ROOT}/hosting - ${ONNXRUNTIME_ROOT}/hosting/http - PUBLIC - ${Boost_INCLUDE_DIR} - ${re2_src} -) - -target_link_libraries(onnxruntime_hosting_lib PRIVATE - hosting_proto - ${Boost_LIBRARIES} - onnxruntime_hosting_http_core_lib - onnxruntime_session - onnxruntime_optimizer - onnxruntime_providers - onnxruntime_util - onnxruntime_framework - onnxruntime_util - onnxruntime_graph - onnxruntime_common - onnxruntime_mlas - ${onnxruntime_EXTERNAL_LIBRARIES} -) - -# For IDE only -source_group(TREE ${REPO_ROOT} FILES ${onnxruntime_hosting_srcs} ${onnxruntime_hosting_lib_srcs} ${onnxruntime_hosting_lib}) - -# Hosting Application -add_executable(${HOSTING_APP_NAME} ${onnxruntime_hosting_srcs}) -add_dependencies(${HOSTING_APP_NAME} onnx hosting_proto onnx_proto 
${onnxruntime_EXTERNAL_DEPENDENCIES}) - -if(NOT WIN32) - if(HAS_UNUSED_PARAMETER) - set_source_files_properties("${ONNXRUNTIME_ROOT}/hosting/main.cc" PROPERTIES COMPILE_FLAGS -Wno-unused-parameter) - endif() -endif() - -onnxruntime_add_include_to_target(${HOSTING_APP_NAME} onnxruntime_session onnxruntime_hosting_lib gsl onnx onnx_proto hosting_proto) - -target_include_directories(${HOSTING_APP_NAME} PRIVATE - ${ONNXRUNTIME_ROOT} - ${ONNXRUNTIME_ROOT}/hosting/http -) - -target_link_libraries(${HOSTING_APP_NAME} PRIVATE - onnxruntime_hosting_http_core_lib - onnxruntime_hosting_lib -) - diff --git a/cmake/onnxruntime_server.cmake b/cmake/onnxruntime_server.cmake new file mode 100644 index 0000000000000..f67df80e4da35 --- /dev/null +++ b/cmake/onnxruntime_server.cmake @@ -0,0 +1,120 @@ +# Copyright (c) Microsoft Corporation. All rights reserved. +# Licensed under the MIT License. + +set(SERVER_APP_NAME "onnxruntime_server") + +# Generate .h and .cc files from protobuf file +add_library(server_proto + ${ONNXRUNTIME_ROOT}/server/protobuf/predict.proto) +target_include_directories(server_proto PUBLIC $ "${CMAKE_CURRENT_BINARY_DIR}/.." 
${CMAKE_CURRENT_BINARY_DIR}/onnx) +target_compile_definitions(server_proto PUBLIC $) +onnxruntime_protobuf_generate(APPEND_PATH IMPORT_DIRS ${REPO_ROOT}/cmake/external/protobuf/src ${ONNXRUNTIME_ROOT}/server/protobuf ${ONNXRUNTIME_ROOT}/core/protobuf TARGET server_proto) +add_dependencies(server_proto onnx_proto ${onnxruntime_EXTERNAL_DEPENDENCIES}) +if(NOT WIN32) + if(HAS_UNUSED_PARAMETER) + set_source_files_properties(${CMAKE_CURRENT_BINARY_DIR}/model_metadata.pb.cc PROPERTIES COMPILE_FLAGS -Wno-unused-parameter) + set_source_files_properties(${CMAKE_CURRENT_BINARY_DIR}/model_status.pb.cc PROPERTIES COMPILE_FLAGS -Wno-unused-parameter) + set_source_files_properties(${CMAKE_CURRENT_BINARY_DIR}/predict.pb.cc PROPERTIES COMPILE_FLAGS -Wno-unused-parameter) + endif() +endif() + +# Setup dependencies +include(get_boost.cmake) +set(re2_src ${REPO_ROOT}/cmake/external/re2) + +# Setup source code +set(onnxruntime_server_lib_srcs + "${ONNXRUNTIME_ROOT}/server/http/json_handling.cc" + "${ONNXRUNTIME_ROOT}/server/http/predict_request_handler.cc" + "${ONNXRUNTIME_ROOT}/server/http/util.cc" + "${ONNXRUNTIME_ROOT}/server/environment.cc" + "${ONNXRUNTIME_ROOT}/server/executor.cc" + "${ONNXRUNTIME_ROOT}/server/converter.cc" + "${ONNXRUNTIME_ROOT}/server/util.cc" + ) +if(NOT WIN32) + if(HAS_UNUSED_PARAMETER) + set_source_files_properties(${ONNXRUNTIME_ROOT}/server/http/json_handling.cc PROPERTIES COMPILE_FLAGS -Wno-unused-parameter) + set_source_files_properties(${ONNXRUNTIME_ROOT}/server/http/predict_request_handler.cc PROPERTIES COMPILE_FLAGS -Wno-unused-parameter) + set_source_files_properties(${ONNXRUNTIME_ROOT}/server/executor.cc PROPERTIES COMPILE_FLAGS -Wno-unused-parameter) + set_source_files_properties(${ONNXRUNTIME_ROOT}/server/converter.cc PROPERTIES COMPILE_FLAGS -Wno-unused-parameter) + set_source_files_properties(${ONNXRUNTIME_ROOT}/server/util.cc PROPERTIES COMPILE_FLAGS -Wno-unused-parameter) + endif() +endif() + +file(GLOB_RECURSE 
onnxruntime_server_http_core_lib_srcs + "${ONNXRUNTIME_ROOT}/server/http/core/*.cc" + ) + +file(GLOB_RECURSE onnxruntime_server_srcs + "${ONNXRUNTIME_ROOT}/server/main.cc" +) + +# HTTP core library +add_library(onnxruntime_server_http_core_lib STATIC + ${onnxruntime_server_http_core_lib_srcs}) +target_include_directories(onnxruntime_server_http_core_lib + PUBLIC + ${ONNXRUNTIME_ROOT}/server/http/core + ${Boost_INCLUDE_DIR} + ${re2_src} +) +add_dependencies(onnxruntime_server_http_core_lib Boost) +target_link_libraries(onnxruntime_server_http_core_lib PRIVATE + ${Boost_LIBRARIES} +) + +# Server library +add_library(onnxruntime_server_lib ${onnxruntime_server_lib_srcs}) +onnxruntime_add_include_to_target(onnxruntime_server_lib gsl onnx_proto server_proto) +target_include_directories(onnxruntime_server_lib PRIVATE + ${ONNXRUNTIME_ROOT} + ${CMAKE_CURRENT_BINARY_DIR}/onnx + ${ONNXRUNTIME_ROOT}/server + ${ONNXRUNTIME_ROOT}/server/http + PUBLIC + ${Boost_INCLUDE_DIR} + ${re2_src} +) + +target_link_libraries(onnxruntime_server_lib PRIVATE + server_proto + ${Boost_LIBRARIES} + onnxruntime_server_http_core_lib + onnxruntime_session + onnxruntime_optimizer + onnxruntime_providers + onnxruntime_util + onnxruntime_framework + onnxruntime_util + onnxruntime_graph + onnxruntime_common + onnxruntime_mlas + ${onnxruntime_EXTERNAL_LIBRARIES} +) + +# For IDE only +source_group(TREE ${REPO_ROOT} FILES ${onnxruntime_server_srcs} ${onnxruntime_server_lib_srcs} ${onnxruntime_server_lib}) + +# Server Application +add_executable(${SERVER_APP_NAME} ${onnxruntime_server_srcs}) +add_dependencies(${SERVER_APP_NAME} onnx server_proto onnx_proto ${onnxruntime_EXTERNAL_DEPENDENCIES}) + +if(NOT WIN32) + if(HAS_UNUSED_PARAMETER) + set_source_files_properties("${ONNXRUNTIME_ROOT}/server/main.cc" PROPERTIES COMPILE_FLAGS -Wno-unused-parameter) + endif() +endif() + +onnxruntime_add_include_to_target(${SERVER_APP_NAME} onnxruntime_session onnxruntime_server_lib gsl onnx onnx_proto server_proto) + 
+target_include_directories(${SERVER_APP_NAME} PRIVATE + ${ONNXRUNTIME_ROOT} + ${ONNXRUNTIME_ROOT}/server/http +) + +target_link_libraries(${SERVER_APP_NAME} PRIVATE + onnxruntime_server_http_core_lib + onnxruntime_server_lib +) + diff --git a/cmake/onnxruntime_unittests.cmake b/cmake/onnxruntime_unittests.cmake index 04db354db3c90..73b5b48be5fa9 100644 --- a/cmake/onnxruntime_unittests.cmake +++ b/cmake/onnxruntime_unittests.cmake @@ -162,9 +162,9 @@ set(onnxruntime_test_framework_libs onnxruntime_mlas ) -set(onnxruntime_test_hosting_libs +set(onnxruntime_test_server_libs onnxruntime_test_utils - onnxruntime_test_utils_for_hosting + onnxruntime_test_utils_for_server ) if(WIN32) @@ -552,52 +552,55 @@ if (onnxruntime_BUILD_SHARED_LIB) endif() endif() -if (onnxruntime_BUILD_HOSTING) - file(GLOB onnxruntime_test_hosting_src - "${TEST_SRC_DIR}/hosting/unit_tests/*.cc" - "${TEST_SRC_DIR}/hosting/unit_tests/*.h" +if (onnxruntime_BUILD_SERVER) + file(GLOB onnxruntime_test_server_src + "${TEST_SRC_DIR}/server/unit_tests/*.cc" + "${TEST_SRC_DIR}/server/unit_tests/*.h" ) - file(GLOB onnxruntime_integration_test_hosting_src - "${TEST_SRC_DIR}/hosting/integration_tests/*.py" + file(GLOB onnxruntime_integration_test_server_src + "${TEST_SRC_DIR}/server/integration_tests/*.py" ) if(NOT WIN32) if(HAS_UNUSED_PARAMETER) - set_source_files_properties("${TEST_SRC_DIR}/hosting/unit_tests/json_handling_tests.cc" PROPERTIES COMPILE_FLAGS -Wno-unused-parameter) - set_source_files_properties("${TEST_SRC_DIR}/hosting/unit_tests/converter_tests.cc" PROPERTIES COMPILE_FLAGS -Wno-unused-parameter) - set_source_files_properties("${TEST_SRC_DIR}/hosting/unit_tests/util_tests.cc" PROPERTIES COMPILE_FLAGS -Wno-unused-parameter) + set_source_files_properties("${TEST_SRC_DIR}/server/unit_tests/json_handling_tests.cc" PROPERTIES COMPILE_FLAGS -Wno-unused-parameter) + set_source_files_properties("${TEST_SRC_DIR}/server/unit_tests/converter_tests.cc" PROPERTIES COMPILE_FLAGS -Wno-unused-parameter) + 
set_source_files_properties("${TEST_SRC_DIR}/server/unit_tests/util_tests.cc" PROPERTIES COMPILE_FLAGS -Wno-unused-parameter) endif() endif() - add_library(onnxruntime_test_utils_for_hosting ${onnxruntime_test_hosting_src}) - onnxruntime_add_include_to_target(onnxruntime_test_utils_for_hosting onnxruntime_test_utils gtest gmock gsl onnx onnx_proto hosting_proto) - add_dependencies(onnxruntime_test_utils_for_hosting onnxruntime_hosting_lib onnxruntime_hosting_http_core_lib Boost ${onnxruntime_EXTERNAL_DEPENDENCIES}) - target_include_directories(onnxruntime_test_utils_for_hosting PUBLIC ${Boost_INCLUDE_DIR} ${REPO_ROOT}/cmake/external/re2 ${CMAKE_CURRENT_BINARY_DIR}/onnx ${ONNXRUNTIME_ROOT}/hosting/http ${ONNXRUNTIME_ROOT}/hosting/http/core PRIVATE ${ONNXRUNTIME_ROOT} ) - target_link_libraries(onnxruntime_test_utils_for_hosting ${Boost_LIBRARIES} ${onnx_test_libs}) + add_library(onnxruntime_test_utils_for_server ${onnxruntime_test_server_src}) + onnxruntime_add_include_to_target(onnxruntime_test_utils_for_server onnxruntime_test_utils gtest gmock gsl onnx onnx_proto server_proto) + add_dependencies(onnxruntime_test_utils_for_server onnxruntime_server_lib onnxruntime_server_http_core_lib Boost ${onnxruntime_EXTERNAL_DEPENDENCIES}) + target_include_directories(onnxruntime_test_utils_for_server PUBLIC ${Boost_INCLUDE_DIR} ${REPO_ROOT}/cmake/external/re2 ${CMAKE_CURRENT_BINARY_DIR}/onnx ${ONNXRUNTIME_ROOT}/server/http ${ONNXRUNTIME_ROOT}/server/http/core PRIVATE ${ONNXRUNTIME_ROOT} ) + target_link_libraries(onnxruntime_test_utils_for_server ${Boost_LIBRARIES} ${onnx_test_libs}) AddTest( - TARGET onnxruntime_hosting_tests - SOURCES ${onnxruntime_test_hosting_src} - LIBS ${onnxruntime_test_hosting_libs} hosting_proto onnxruntime_hosting_lib ${onnxruntime_test_providers_libs} + TARGET onnxruntime_server_tests + SOURCES ${onnxruntime_test_server_src} + LIBS ${onnxruntime_test_server_libs} server_proto onnxruntime_server_lib ${onnxruntime_test_providers_libs} DEPENDS 
${onnxruntime_EXTERNAL_DEPENDENCIES} ) onnxruntime_protobuf_generate( - APPEND_PATH IMPORT_DIRS ${REPO_ROOT}/cmake/external/protobuf/src ${ONNXRUNTIME_ROOT}/hosting/protobuf ${ONNXRUNTIME_ROOT}/core/protobuf - PROTOS ${ONNXRUNTIME_ROOT}/hosting/protobuf/predict.proto ${ONNXRUNTIME_ROOT}/hosting/protobuf/onnx-ml.proto + APPEND_PATH IMPORT_DIRS ${REPO_ROOT}/cmake/external/protobuf/src ${ONNXRUNTIME_ROOT}/server/protobuf ${ONNXRUNTIME_ROOT}/core/protobuf + PROTOS ${ONNXRUNTIME_ROOT}/server/protobuf/predict.proto ${ONNXRUNTIME_ROOT}/server/protobuf/onnx-ml.proto LANGUAGE python - TARGET onnxruntime_hosting_tests - OUT_VAR hosting_test_py) + TARGET onnxruntime_server_tests + OUT_VAR server_test_py) add_custom_command( - TARGET onnxruntime_hosting_tests POST_BUILD - COMMAND ${CMAKE_COMMAND} -E make_directory $/hosting_test + TARGET onnxruntime_server_tests POST_BUILD + COMMAND ${CMAKE_COMMAND} -E make_directory ${CMAKE_CURRENT_BINARY_DIR}/server_test COMMAND ${CMAKE_COMMAND} -E copy - ${onnxruntime_integration_test_hosting_src} - $/hosting_test/ + ${onnxruntime_integration_test_server_src} + ${CMAKE_CURRENT_BINARY_DIR}/server_test/ COMMAND ${CMAKE_COMMAND} -E copy - $/*_pb2.py - $/hosting_test/ + ${CMAKE_CURRENT_BINARY_DIR}/onnx_ml_pb2.py + ${CMAKE_CURRENT_BINARY_DIR}/server_test/ + COMMAND ${CMAKE_COMMAND} -E copy + ${CMAKE_CURRENT_BINARY_DIR}/predict_pb2.py + ${CMAKE_CURRENT_BINARY_DIR}/server_test/ ) endif() diff --git a/docs/Hosting_Application_Usage.md b/docs/ONNX_Runtime_Server_Usage.md similarity index 88% rename from docs/Hosting_Application_Usage.md rename to docs/ONNX_Runtime_Server_Usage.md index a542ea32a46a9..ac23bbb83726a 100644 --- a/docs/Hosting_Application_Usage.md +++ b/docs/ONNX_Runtime_Server_Usage.md @@ -1,21 +1,21 @@ -

Note: Hosting Application is still in beta state. It's currently not ready for production environments.

+

Note: ONNX Runtime Server is still in beta state. It's currently not ready for production environments.

-# How to Use ONNX Hosting Application REST API for Prediction +# How to Use ONNX Runtime Server REST API for Prediction -ONNX Hosting provides a REST API for prediction. The goal of the project is to make it easy to "host" any ONNX model as a RESTful service. The CLI command to start the service is shown below: +ONNX Runtime Server provides a REST API for prediction. The goal of the project is to make it easy to "host" any ONNX model as a RESTful service. The CLI command to start the service is shown below: ``` -~./onnxruntime_hosting --help -ONNX Hosting: host an ONNX model for inferencing with ONNXRuntime +$ ./onnxruntime_server +the option '--model_path' is required but missing Allowed options: - -h [ --help ] Shows a help message and exits - --log_level arg (=info) Logging level. Allowed options (case - sensitive): verbose, info, warning, error, - fatal - -m [ --model_path ] arg Path to ONNX model - -a [ --address ] arg (=0.0.0.0) The base HTTP address - --http_port arg (=8001) HTTP port to listen to requests - --num_http_threads arg (=8) Number of http threads + -h [ --help ] Shows a help message and exits + --log_level arg (=info) Logging level. Allowed options (case sensitive): + verbose, info, warning, error, fatal + --model_path arg Path to ONNX model + --address arg (=0.0.0.0) The base HTTP address + --http_port arg (=8001) HTTP port to listen to requests + --num_http_threads arg (=<# of your cpu cores>) Number of http threads + ``` @@ -26,7 +26,7 @@ Note: The only mandatory argument for the program here is `model_path` To host an ONNX model as a REST API server, run: ``` -./onnxruntime_hosting -m /// +./onnxruntime_server --model_path /// ``` The prediction URL is in this format: @@ -44,7 +44,7 @@ An HTTP request can be a Protobuf message in two formats: binary or JSON. 
The HT * For `"Content-Type: application/json"`, the payload will be deserialized as JSON string in UTF-8 format * For `"Content-Type: application/vnd.google.protobuf"`, `"Content-Type: application/x-protobuf"` or `"Content-Type: application/octet-stream"`, the payload will be consumed as protobuf message directly. -The Protobuf definition can be found [here](https://github.com/Microsoft/onnxruntime/blob/master/onnxruntime/hosting/protobuf/predict.proto). +The Protobuf definition can be found [here](https://github.com/Microsoft/onnxruntime/blob/master/onnxruntime/server/protobuf/predict.proto). ## Inferencing diff --git a/onnxruntime/hosting/converter.cc b/onnxruntime/server/converter.cc similarity index 99% rename from onnxruntime/hosting/converter.cc rename to onnxruntime/server/converter.cc index 5b2852fe7bc1b..56fd51cd10c4c 100644 --- a/onnxruntime/hosting/converter.cc +++ b/onnxruntime/server/converter.cc @@ -17,7 +17,7 @@ #include "converter.h" namespace onnxruntime { -namespace hosting { +namespace server { namespace protobufutil = google::protobuf::util; @@ -257,5 +257,5 @@ common::Status MLValueToTensorProto(const onnxruntime::MLValue& ml_value, bool u return common::Status::OK(); } -} // namespace hosting +} // namespace server } // namespace onnxruntime \ No newline at end of file diff --git a/onnxruntime/hosting/converter.h b/onnxruntime/server/converter.h similarity index 96% rename from onnxruntime/hosting/converter.h rename to onnxruntime/server/converter.h index bfa64cf9ca009..983c9f454596c 100644 --- a/onnxruntime/hosting/converter.h +++ b/onnxruntime/server/converter.h @@ -11,7 +11,7 @@ #include "predict.pb.h" namespace onnxruntime { -namespace hosting { +namespace server { onnx::TensorProto_DataType MLDataTypeToTensorProtoDataType(const onnxruntime::DataTypeImpl* cpp_type); @@ -25,5 +25,5 @@ common::Status MLValueToTensorProto(const onnxruntime::MLValue& ml_value, bool u std::unique_ptr logger, /* out */ onnx::TensorProto& tensor_proto); -} // 
namespace hosting +} // namespace server } // namespace onnxruntime diff --git a/onnxruntime/hosting/environment.cc b/onnxruntime/server/environment.cc similarity index 74% rename from onnxruntime/hosting/environment.cc rename to onnxruntime/server/environment.cc index 51352b9f99346..5235cad266d8c 100644 --- a/onnxruntime/hosting/environment.cc +++ b/onnxruntime/server/environment.cc @@ -8,10 +8,10 @@ #include "log_sink.h" namespace onnxruntime { -namespace hosting { +namespace server { -HostingEnvironment::HostingEnvironment(logging::Severity severity) : severity_(severity), - logger_id_("HostingApp"), +ServerEnvironment::ServerEnvironment(logging::Severity severity) : severity_(severity), + logger_id_("ServerApp"), default_logging_manager_( std::unique_ptr{new LogSink{}}, severity, @@ -24,7 +24,7 @@ HostingEnvironment::HostingEnvironment(logging::Severity severity) : severity_(s session = std::make_unique(options_, &default_logging_manager_); } -common::Status HostingEnvironment::InitializeModel(const std::string& model_path) { +common::Status ServerEnvironment::InitializeModel(const std::string& model_path) { auto status = session->Load(model_path); if (!status.IsOK()) { return status; @@ -42,19 +42,19 @@ common::Status HostingEnvironment::InitializeModel(const std::string& model_path return common::Status::OK(); } -const std::vector& HostingEnvironment::GetModelOutputNames() const { +const std::vector& ServerEnvironment::GetModelOutputNames() const { return model_output_names_; } -const logging::Logger& HostingEnvironment::GetAppLogger() const { +const logging::Logger& ServerEnvironment::GetAppLogger() const { return default_logging_manager_.DefaultLogger(); } -logging::Severity HostingEnvironment::GetLogSeverity() const { +logging::Severity ServerEnvironment::GetLogSeverity() const { return severity_; } -std::unique_ptr HostingEnvironment::GetLogger(const std::string& id) { +std::unique_ptr ServerEnvironment::GetLogger(const std::string& id) { if (id.empty()) 
{ LOGS(GetAppLogger(), WARNING) << "Request id is null or empty string"; } @@ -62,9 +62,9 @@ std::unique_ptr HostingEnvironment::GetLogger(const std::string return default_logging_manager_.CreateLogger(id, severity_, false); } -onnxruntime::InferenceSession* HostingEnvironment::GetSession() const { +onnxruntime::InferenceSession* ServerEnvironment::GetSession() const { return session.get(); } -} // namespace hosting +} // namespace server } // namespace onnxruntime diff --git a/onnxruntime/hosting/environment.h b/onnxruntime/server/environment.h similarity index 82% rename from onnxruntime/hosting/environment.h rename to onnxruntime/server/environment.h index 011fc1336710f..4e0e408a7f20f 100644 --- a/onnxruntime/hosting/environment.h +++ b/onnxruntime/server/environment.h @@ -11,15 +11,15 @@ #include "core/session/inference_session.h" namespace onnxruntime { -namespace hosting { +namespace server { namespace logging = logging; -class HostingEnvironment { +class ServerEnvironment { public: - explicit HostingEnvironment(logging::Severity severity); - ~HostingEnvironment() = default; - HostingEnvironment(const HostingEnvironment&) = delete; + explicit ServerEnvironment(logging::Severity severity); + ~ServerEnvironment() = default; + ServerEnvironment(const ServerEnvironment&) = delete; const logging::Logger& GetAppLogger() const; std::unique_ptr GetLogger(const std::string& id); @@ -41,5 +41,5 @@ class HostingEnvironment { std::vector model_output_names_; }; -} // namespace hosting +} // namespace server } // namespace onnxruntime diff --git a/onnxruntime/hosting/executor.cc b/onnxruntime/server/executor.cc similarity index 94% rename from onnxruntime/hosting/executor.cc rename to onnxruntime/server/executor.cc index 0a3f26d215548..48535129b9028 100644 --- a/onnxruntime/hosting/executor.cc +++ b/onnxruntime/server/executor.cc @@ -20,7 +20,7 @@ #include "util.h" namespace onnxruntime { -namespace hosting { +namespace server { namespace protobufutil = 
google::protobuf::util; @@ -51,7 +51,7 @@ protobufutil::Status Executor::SetMLValue(const onnx::TensorProto& input_tensor, return protobufutil::Status::OK; } -protobufutil::Status Executor::SetNameMLValueMap(onnxruntime::NameMLValMap& name_value_map, const onnxruntime::hosting::PredictRequest& request) { +protobufutil::Status Executor::SetNameMLValueMap(onnxruntime::NameMLValMap& name_value_map, const onnxruntime::server::PredictRequest& request) { auto logger = env_->GetLogger(request_id_); OrtAllocatorInfo* cpu_allocator_info = nullptr; @@ -84,8 +84,8 @@ protobufutil::Status Executor::SetNameMLValueMap(onnxruntime::NameMLValMap& name protobufutil::Status Executor::Predict(const std::string& model_name, const std::string& model_version, - onnxruntime::hosting::PredictRequest& request, - /* out */ onnxruntime::hosting::PredictResponse& response) { + onnxruntime::server::PredictRequest& request, + /* out */ onnxruntime::server::PredictResponse& response) { auto logger = env_->GetLogger(request_id_); // Convert PredictRequest to NameMLValMap @@ -144,5 +144,5 @@ protobufutil::Status Executor::Predict(const std::string& model_name, return protobufutil::Status::OK; } -} // namespace hosting +} // namespace server } // namespace onnxruntime \ No newline at end of file diff --git a/onnxruntime/hosting/executor.h b/onnxruntime/server/executor.h similarity index 75% rename from onnxruntime/hosting/executor.h rename to onnxruntime/server/executor.h index 17a0fb5ab8321..47aaa49b84d07 100644 --- a/onnxruntime/hosting/executor.h +++ b/onnxruntime/server/executor.h @@ -9,22 +9,22 @@ #include "predict.pb.h" namespace onnxruntime { -namespace hosting { +namespace server { class Executor { public: - Executor(HostingEnvironment* hosting_env, std::string request_id) : env_(hosting_env), + Executor(ServerEnvironment* server_env, std::string request_id) : env_(server_env), request_id_(std::move(request_id)), using_raw_data_(true) {} // Prediction method google::protobuf::util::Status 
Predict(const std::string& model_name, const std::string& model_version, - onnxruntime::hosting::PredictRequest& request, - /* out */ onnxruntime::hosting::PredictResponse& response); + onnxruntime::server::PredictRequest& request, + /* out */ onnxruntime::server::PredictResponse& response); private: - HostingEnvironment* env_; + ServerEnvironment* env_; const std::string request_id_; bool using_raw_data_; @@ -32,8 +32,8 @@ class Executor { OrtAllocatorInfo* cpu_allocator_info, /* out */ MLValue& ml_value); - google::protobuf::util::Status SetNameMLValueMap(onnxruntime::NameMLValMap& name_value_map, const onnxruntime::hosting::PredictRequest& request); + google::protobuf::util::Status SetNameMLValueMap(onnxruntime::NameMLValMap& name_value_map, const onnxruntime::server::PredictRequest& request); }; -} // namespace hosting +} // namespace server } // namespace onnxruntime diff --git a/onnxruntime/hosting/http/core/context.h b/onnxruntime/server/http/core/context.h similarity index 96% rename from onnxruntime/hosting/http/core/context.h rename to onnxruntime/server/http/core/context.h index 63ad0ef009a10..b9598762fd5a5 100644 --- a/onnxruntime/hosting/http/core/context.h +++ b/onnxruntime/server/http/core/context.h @@ -16,7 +16,7 @@ #include namespace onnxruntime { -namespace hosting { +namespace server { namespace http = boost::beast::http; // from @@ -42,5 +42,5 @@ class HttpContext { HttpContext(const HttpContext&) = delete; }; -} // namespace hosting +} // namespace server } // namespace onnxruntime diff --git a/onnxruntime/hosting/http/core/http_server.cc b/onnxruntime/server/http/core/http_server.cc similarity index 97% rename from onnxruntime/hosting/http/core/http_server.cc rename to onnxruntime/server/http/core/http_server.cc index 134e8a09d83a9..a7ae6f7cbe6c1 100644 --- a/onnxruntime/hosting/http/core/http_server.cc +++ b/onnxruntime/server/http/core/http_server.cc @@ -20,7 +20,7 @@ namespace net = boost::asio; // from using tcp = boost::asio::ip::tcp; // 
from namespace onnxruntime { -namespace hosting { +namespace server { App::App() { http_details.address = boost::asio::ip::make_address_v4("0.0.0.0"); @@ -84,5 +84,5 @@ App& App::Run() { ioc.run(); return *this; } -} // namespace hosting +} // namespace server } // namespace onnxruntime diff --git a/onnxruntime/hosting/http/core/http_server.h b/onnxruntime/server/http/core/http_server.h similarity index 96% rename from onnxruntime/hosting/http/core/http_server.h rename to onnxruntime/server/http/core/http_server.h index e28d12939589c..505d93668cb47 100644 --- a/onnxruntime/hosting/http/core/http_server.h +++ b/onnxruntime/server/http/core/http_server.h @@ -17,7 +17,7 @@ #include "listener.h" namespace onnxruntime { -namespace hosting { +namespace server { namespace http = beast::http; // from namespace net = boost::asio; // from @@ -49,5 +49,5 @@ class App { StartFn on_start_ = {}; Details http_details{}; }; -} // namespace hosting +} // namespace server } // namespace onnxruntime diff --git a/onnxruntime/hosting/http/core/listener.cc b/onnxruntime/server/http/core/listener.cc similarity index 97% rename from onnxruntime/hosting/http/core/listener.cc rename to onnxruntime/server/http/core/listener.cc index c30b936119df2..93e2e4ac68eae 100644 --- a/onnxruntime/hosting/http/core/listener.cc +++ b/onnxruntime/server/http/core/listener.cc @@ -6,7 +6,7 @@ #include "util.h" namespace onnxruntime { -namespace hosting { +namespace server { namespace net = boost::asio; // from using tcp = boost::asio::ip::tcp; // from @@ -78,5 +78,5 @@ void Listener::OnAccept(beast::error_code ec) { // Accept another connection DoAccept(); } -} // namespace hosting +} // namespace server } // namespace onnxruntime \ No newline at end of file diff --git a/onnxruntime/hosting/http/core/listener.h b/onnxruntime/server/http/core/listener.h similarity index 95% rename from onnxruntime/hosting/http/core/listener.h rename to onnxruntime/server/http/core/listener.h index 
0415c0fb93c27..3295e6a448cb4 100644 --- a/onnxruntime/hosting/http/core/listener.h +++ b/onnxruntime/server/http/core/listener.h @@ -11,7 +11,7 @@ #include "util.h" namespace onnxruntime { -namespace hosting { +namespace server { namespace net = boost::asio; // from using tcp = boost::asio::ip::tcp; // from @@ -39,6 +39,6 @@ class Listener : public std::enable_shared_from_this { void OnAccept(beast::error_code ec); }; -} // namespace hosting +} // namespace server } // namespace onnxruntime diff --git a/onnxruntime/hosting/http/core/routes.cc b/onnxruntime/server/http/core/routes.cc similarity index 97% rename from onnxruntime/hosting/http/core/routes.cc rename to onnxruntime/server/http/core/routes.cc index af174c32096a3..5bb0878845b34 100644 --- a/onnxruntime/hosting/http/core/routes.cc +++ b/onnxruntime/server/http/core/routes.cc @@ -8,7 +8,7 @@ #include "routes.h" namespace onnxruntime { -namespace hosting { +namespace server { namespace http = boost::beast::http; // from @@ -77,5 +77,5 @@ http::status Routes::ParseUrl(http::verb method, return http::status::ok; } -} //namespace hosting +} //namespace server } // namespace onnxruntime \ No newline at end of file diff --git a/onnxruntime/hosting/http/core/routes.h b/onnxruntime/server/http/core/routes.h similarity index 96% rename from onnxruntime/hosting/http/core/routes.h rename to onnxruntime/server/http/core/routes.h index 29a1064edf7b0..5681f2437d605 100644 --- a/onnxruntime/hosting/http/core/routes.h +++ b/onnxruntime/server/http/core/routes.h @@ -8,7 +8,7 @@ #include "context.h" namespace onnxruntime { -namespace hosting { +namespace server { namespace http = boost::beast::http; // from @@ -36,6 +36,6 @@ class Routes { std::vector> get_fn_table; }; -} //namespace hosting +} //namespace server } // namespace onnxruntime diff --git a/onnxruntime/hosting/http/core/session.cc b/onnxruntime/server/http/core/session.cc similarity index 99% rename from onnxruntime/hosting/http/core/session.cc rename to 
onnxruntime/server/http/core/session.cc index 6fe9eec742d6f..e7463ade65226 100644 --- a/onnxruntime/hosting/http/core/session.cc +++ b/onnxruntime/server/http/core/session.cc @@ -4,7 +4,7 @@ #include "session.h" namespace onnxruntime { -namespace hosting { +namespace server { namespace net = boost::asio; // from namespace beast = boost::beast; // from @@ -148,5 +148,5 @@ http::status HttpSession::ExecuteUserFunction(HttpContext& context) { return http::status::ok; } -} // namespace hosting +} // namespace server } // namespace onnxruntime \ No newline at end of file diff --git a/onnxruntime/hosting/http/core/session.h b/onnxruntime/server/http/core/session.h similarity index 98% rename from onnxruntime/hosting/http/core/session.h rename to onnxruntime/server/http/core/session.h index a3827c52ba9c4..c1c8dd5f3c0a6 100644 --- a/onnxruntime/hosting/http/core/session.h +++ b/onnxruntime/server/http/core/session.h @@ -15,7 +15,7 @@ #include "util.h" namespace onnxruntime { -namespace hosting { +namespace server { namespace net = boost::asio; // from namespace beast = boost::beast; // from @@ -74,6 +74,6 @@ class HttpSession : public std::enable_shared_from_this { void DoClose(); }; -} // namespace hosting +} // namespace server } // namespace onnxruntime diff --git a/onnxruntime/hosting/http/core/util.cc b/onnxruntime/server/http/core/util.cc similarity index 90% rename from onnxruntime/hosting/http/core/util.cc rename to onnxruntime/server/http/core/util.cc index eb23a067bac76..a5680b800c4e4 100644 --- a/onnxruntime/hosting/http/core/util.cc +++ b/onnxruntime/server/http/core/util.cc @@ -9,12 +9,12 @@ #include "util.h" namespace onnxruntime { -namespace hosting { +namespace server { // Report a failure void ErrorHandling(beast::error_code ec, char const* what) { std::cerr << what << " failed: " << ec.value() << " : " << ec.message() << "\n"; } -} // namespace hosting +} // namespace server } // namespace onnxruntime \ No newline at end of file diff --git 
a/onnxruntime/hosting/http/core/util.h b/onnxruntime/server/http/core/util.h similarity index 90% rename from onnxruntime/hosting/http/core/util.h rename to onnxruntime/server/http/core/util.h index 5d73000c2a866..54faea9629ff8 100644 --- a/onnxruntime/hosting/http/core/util.h +++ b/onnxruntime/server/http/core/util.h @@ -9,13 +9,13 @@ #include "context.h" namespace onnxruntime { -namespace hosting { +namespace server { namespace beast = boost::beast; // from // Report a failure void ErrorHandling(beast::error_code ec, char const* what); -} // namespace hosting +} // namespace server } // namespace onnxruntime diff --git a/onnxruntime/hosting/http/json_handling.cc b/onnxruntime/server/http/json_handling.cc similarity index 88% rename from onnxruntime/hosting/http/json_handling.cc rename to onnxruntime/server/http/json_handling.cc index caae91b00a5b6..6bc46b7878338 100644 --- a/onnxruntime/hosting/http/json_handling.cc +++ b/onnxruntime/server/http/json_handling.cc @@ -13,9 +13,9 @@ namespace protobufutil = google::protobuf::util; namespace onnxruntime { -namespace hosting { +namespace server { -protobufutil::Status GetRequestFromJson(const std::string& json_string, /* out */ onnxruntime::hosting::PredictRequest& request) { +protobufutil::Status GetRequestFromJson(const std::string& json_string, /* out */ onnxruntime::server::PredictRequest& request) { protobufutil::JsonParseOptions options; options.ignore_unknown_fields = true; @@ -23,7 +23,7 @@ protobufutil::Status GetRequestFromJson(const std::string& json_string, /* out * return result; } -protobufutil::Status GenerateResponseInJson(const onnxruntime::hosting::PredictResponse& response, /* out */ std::string& json_string) { +protobufutil::Status GenerateResponseInJson(const onnxruntime::server::PredictResponse& response, /* out */ std::string& json_string) { protobufutil::JsonPrintOptions options; options.add_whitespace = false; options.always_print_primitive_fields = false; @@ -62,5 +62,5 @@ std::string 
escape_string(const std::string& message) { return o.str(); } -} // namespace hosting +} // namespace server } // namespace onnxruntime \ No newline at end of file diff --git a/onnxruntime/hosting/http/json_handling.h b/onnxruntime/server/http/json_handling.h similarity index 85% rename from onnxruntime/hosting/http/json_handling.h rename to onnxruntime/server/http/json_handling.h index ce6c7f9e9cbeb..1e3d8f7239db1 100644 --- a/onnxruntime/hosting/http/json_handling.h +++ b/onnxruntime/server/http/json_handling.h @@ -9,18 +9,18 @@ #include "predict.pb.h" namespace onnxruntime { -namespace hosting { +namespace server { namespace http = boost::beast::http; // Deserialize Json input to PredictRequest. // Unknown fields in the json file will be ignored. -google::protobuf::util::Status GetRequestFromJson(const std::string& json_string, /* out */ onnxruntime::hosting::PredictRequest& request); +google::protobuf::util::Status GetRequestFromJson(const std::string& json_string, /* out */ onnxruntime::server::PredictRequest& request); // Serialize PredictResponse to json string // 1. Proto3 primitive fields with default values will be omitted in JSON output. Eg. int32 field with value 0 will be omitted // 2. 
Enums will be printed as string, not int, to improve readability -google::protobuf::util::Status GenerateResponseInJson(const onnxruntime::hosting::PredictResponse& response, /* out */ std::string& json_string); +google::protobuf::util::Status GenerateResponseInJson(const onnxruntime::server::PredictResponse& response, /* out */ std::string& json_string); // Constructs JSON error message from error code object and error message std::string CreateJsonError(http::status error_code, const std::string& error_message); @@ -29,6 +29,6 @@ std::string CreateJsonError(http::status error_code, const std::string& error_me // Mostly taken from here: https://stackoverflow.com/questions/7724448/simple-json-string-escape-for-c/33799784#33799784 std::string escape_string(const std::string& message); -} // namespace hosting +} // namespace server } // namespace onnxruntime diff --git a/onnxruntime/hosting/http/predict_request_handler.cc b/onnxruntime/server/http/predict_request_handler.cc similarity index 98% rename from onnxruntime/hosting/http/predict_request_handler.cc rename to onnxruntime/server/http/predict_request_handler.cc index f3fadd32c53db..32d6443744e3d 100644 --- a/onnxruntime/hosting/http/predict_request_handler.cc +++ b/onnxruntime/server/http/predict_request_handler.cc @@ -10,7 +10,7 @@ #include "util.h" namespace onnxruntime { -namespace hosting { +namespace server { namespace protobufutil = google::protobuf::util; @@ -35,7 +35,7 @@ void Predict(const std::string& name, const std::string& version, const std::string& action, /* in, out */ HttpContext& context, - std::shared_ptr env) { + std::shared_ptr env) { auto logger = env->GetLogger(context.request_id); LOGS(*logger, INFO) << "Model Name: " << name << ", Version: " << version << ", Action: " << action; @@ -130,5 +130,5 @@ static bool ParseRequestPayload(const HttpContext& context, SupportedContentType return true; } -} // namespace hosting +} // namespace server } // namespace onnxruntime diff --git 
a/onnxruntime/hosting/http/predict_request_handler.h b/onnxruntime/server/http/predict_request_handler.h similarity index 84% rename from onnxruntime/hosting/http/predict_request_handler.h rename to onnxruntime/server/http/predict_request_handler.h index 0b1cce1216f36..8fe0d7f74d263 100644 --- a/onnxruntime/hosting/http/predict_request_handler.h +++ b/onnxruntime/server/http/predict_request_handler.h @@ -5,7 +5,7 @@ #include "json_handling.h" namespace onnxruntime { -namespace hosting { +namespace server { namespace beast = boost::beast; namespace http = beast::http; @@ -17,7 +17,7 @@ void Predict(const std::string& name, const std::string& version, const std::string& action, /* in, out */ HttpContext& context, - std::shared_ptr env); + std::shared_ptr env); -} // namespace hosting +} // namespace server } // namespace onnxruntime diff --git a/onnxruntime/hosting/http/util.cc b/onnxruntime/server/http/util.cc similarity index 98% rename from onnxruntime/hosting/http/util.cc rename to onnxruntime/server/http/util.cc index 0b9d2a704e7e0..61986291f444a 100644 --- a/onnxruntime/hosting/http/util.cc +++ b/onnxruntime/server/http/util.cc @@ -11,7 +11,7 @@ namespace protobufutil = google::protobuf::util; namespace onnxruntime { -namespace hosting { +namespace server { static std::unordered_set protobuf_mime_types{ "application/octet-stream", @@ -80,5 +80,5 @@ SupportedContentType GetResponseContentType(const HttpContext& context) { return SupportedContentType::Unknown; } -} // namespace hosting +} // namespace server } // namespace onnxruntime \ No newline at end of file diff --git a/onnxruntime/hosting/http/util.h b/onnxruntime/server/http/util.h similarity index 92% rename from onnxruntime/hosting/http/util.h rename to onnxruntime/server/http/util.h index 469d40e4bda1a..ba38b3976a1cb 100644 --- a/onnxruntime/hosting/http/util.h +++ b/onnxruntime/server/http/util.h @@ -7,10 +7,10 @@ #include #include -#include "hosting/http/core/context.h" +#include 
"server/http/core/context.h" namespace onnxruntime { -namespace hosting { +namespace server { namespace beast = boost::beast; // from @@ -31,5 +31,5 @@ SupportedContentType GetRequestContentType(const HttpContext& context); // Currently we only support three types of response content type: */*, application/json and application/octet-stream SupportedContentType GetResponseContentType(const HttpContext& context); -} // namespace hosting +} // namespace server } // namespace onnxruntime diff --git a/onnxruntime/hosting/log_sink.h b/onnxruntime/server/log_sink.h similarity index 90% rename from onnxruntime/hosting/log_sink.h rename to onnxruntime/server/log_sink.h index b177fec6052fc..70df47f72c0a7 100644 --- a/onnxruntime/hosting/log_sink.h +++ b/onnxruntime/server/log_sink.h @@ -8,13 +8,13 @@ #include "core/common/logging/sinks/ostream_sink.h" namespace onnxruntime { -namespace hosting { +namespace server { class LogSink : public onnxruntime::logging::OStreamSink { public: LogSink() : OStreamSink(std::cout, /*flush*/ true) { } }; -} // namespace hosting +} // namespace server } // namespace onnxruntime diff --git a/onnxruntime/hosting/main.cc b/onnxruntime/server/main.cc similarity index 84% rename from onnxruntime/hosting/main.cc rename to onnxruntime/server/main.cc index 88a32942852e8..ac4891566b1d5 100644 --- a/onnxruntime/hosting/main.cc +++ b/onnxruntime/server/main.cc @@ -8,19 +8,19 @@ namespace beast = boost::beast; namespace http = beast::http; -namespace hosting = onnxruntime::hosting; +namespace server = onnxruntime::server; int main(int argc, char* argv[]) { - hosting::ServerConfiguration config{}; + server::ServerConfiguration config{}; auto res = config.ParseInput(argc, argv); - if (res == hosting::Result::ExitSuccess) { + if (res == server::Result::ExitSuccess) { exit(EXIT_SUCCESS); - } else if (res == hosting::Result::ExitFailure) { + } else if (res == server::Result::ExitFailure) { exit(EXIT_FAILURE); } - auto env = 
std::make_shared(config.logging_level); + auto env = std::make_shared(config.logging_level); auto logger = env->GetAppLogger(); LOGS(logger, VERBOSE) << "Logging manager initialized."; LOGS(logger, INFO) << "Model path: " << config.model_path; @@ -42,7 +42,7 @@ int main(int argc, char* argv[]) { } auto const boost_address = boost::asio::ip::make_address(config.address); - hosting::App app{}; + server::App app{}; app.RegisterStartup( [env](const auto& details) -> void { @@ -63,13 +63,13 @@ int main(int argc, char* argv[]) { if (!context.client_request_id.empty()) { context.response.insert("x-ms-client-request-id", (context).client_request_id); } - context.response.body() = hosting::CreateJsonError(context.error_code, context.error_message); + context.response.body() = server::CreateJsonError(context.error_code, context.error_message); }); app.RegisterPost( R"(/v1/models/([^/:]+)(?:/versions/(\d+))?:(classify|regress|predict))", [env](const auto& name, const auto& version, const auto& action, auto& context) -> void { - hosting::Predict(name, version, action, context, env); + server::Predict(name, version, action, context, env); }); app.Bind(boost_address, config.http_port) diff --git a/onnxruntime/hosting/protobuf/onnx-ml.proto b/onnxruntime/server/protobuf/onnx-ml.proto similarity index 100% rename from onnxruntime/hosting/protobuf/onnx-ml.proto rename to onnxruntime/server/protobuf/onnx-ml.proto diff --git a/onnxruntime/hosting/protobuf/predict.proto b/onnxruntime/server/protobuf/predict.proto similarity index 96% rename from onnxruntime/hosting/protobuf/predict.proto rename to onnxruntime/server/protobuf/predict.proto index 33029ad8a600c..21b04386353eb 100644 --- a/onnxruntime/hosting/protobuf/predict.proto +++ b/onnxruntime/server/protobuf/predict.proto @@ -2,7 +2,7 @@ syntax = "proto3"; import "onnx-ml.proto"; -package onnxruntime.hosting; +package onnxruntime.server; // PredictRequest specifies how inputs are mapped to tensors // and how outputs are filtered 
before returning to user. diff --git a/onnxruntime/hosting/server_configuration.h b/onnxruntime/server/server_configuration.h similarity index 97% rename from onnxruntime/hosting/server_configuration.h rename to onnxruntime/server/server_configuration.h index d5b1ee459bc21..4ae9b58496da7 100644 --- a/onnxruntime/hosting/server_configuration.h +++ b/onnxruntime/server/server_configuration.h @@ -11,7 +11,7 @@ #include "core/common/logging/logging.h" namespace onnxruntime { -namespace hosting { +namespace server { namespace po = boost::program_options; @@ -37,7 +37,7 @@ static std::unordered_map supported // Provides sane default values class ServerConfiguration { public: - const std::string full_desc = "ONNX Hosting: host an ONNX model with ONNX Runtime"; + const std::string full_desc = "ONNX Server: host an ONNX model with ONNX Runtime"; std::string model_path; std::string address = "0.0.0.0"; int http_port = 8001; @@ -127,5 +127,5 @@ class ServerConfiguration { } }; -} // namespace hosting +} // namespace server } // namespace onnxruntime diff --git a/onnxruntime/hosting/util.cc b/onnxruntime/server/util.cc similarity index 97% rename from onnxruntime/hosting/util.cc rename to onnxruntime/server/util.cc index 47bc4f03cb183..579c2e81bcc3c 100644 --- a/onnxruntime/hosting/util.cc +++ b/onnxruntime/server/util.cc @@ -8,7 +8,7 @@ #include "util.h" namespace onnxruntime { -namespace hosting { +namespace server { namespace protobufutil = google::protobuf::util; @@ -44,5 +44,5 @@ protobufutil::Status GenerateProtobufStatus(const onnxruntime::common::Status& o return protobufutil::Status(code, oss.str()); } -} // namespace hosting +} // namespace server } // namespace onnxruntime diff --git a/onnxruntime/hosting/util.h b/onnxruntime/server/util.h similarity index 90% rename from onnxruntime/hosting/util.h rename to onnxruntime/server/util.h index ead9e866d6a8f..e46a3a13fac63 100644 --- a/onnxruntime/hosting/util.h +++ b/onnxruntime/server/util.h @@ -8,11 +8,11 @@ #include 
"core/common/status.h" namespace onnxruntime { -namespace hosting { +namespace server { // Generate protobuf status from ONNX Runtime status google::protobuf::util::Status GenerateProtobufStatus(const onnxruntime::common::Status& onnx_status, const std::string& message); -} // namespace hosting +} // namespace server } // namespace onnxruntime diff --git a/onnxruntime/test/hosting/integration_tests/README.MD b/onnxruntime/test/hosting/integration_tests/README.MD deleted file mode 100644 index 2d308fe1425b2..0000000000000 --- a/onnxruntime/test/hosting/integration_tests/README.MD +++ /dev/null @@ -1,43 +0,0 @@ -# ONNX Runtime Hosting Application Integration Tests - -## Preparation - -Tests validation depends on protobuf generated *_pb2.py. So we need to have a sucessful hosting application build to have it generated in the build folder under `hosting_test` subfolder. The following instruction assume you are in the folder. Otherwise, tests will fail due to `ModuleNotFoundError`. - -## Functional Tests - -Functional test will be run when build with `--build_hosting --enable_hosting_tests`. To run it separately, here is the command line: - -```Bash -/usr/bin/python3 ./test_main.py -``` - -## Model Zoo Tests - -To run this set of tests, a prepared test data set need to be downloaded from [Azure Blob Storage](https://onnxhostingdev.blob.core.windows.net/testing/hosting_test_data_20190422.zip) and unzip to a folder, e.g. /home/foo/bar/model_zoo_test. It contains: - -* ONNX models from [ONNX Model Zoo](https://github.com/onnx/models) with opset 7/8/9. -* HTTP request json and protobuf files -* Expected response json and protobuf files - -If you only need the request and response data. Here is the [link](https://onnxhostingdev.blob.core.windows.net/testing/hosting_test_data_req_resp_only.zip) to download. 
- -To run the full model zoo tests, here is the command line: - -```Bash -/usr/bin/python3 ./model_zoo_tests.py -``` - -For example: - -```Bash -/usr/bin/python3 ./model_zoo_tests.py /some/where/hosting_app /home/foo/bar/model_zoo_test /home/foo/bar/model_zoo_test -``` - -If those models are in different folder but in the same structure as the test data, you could also do - -```Bash -/usr/bin/python3 ./model_zoo_tests.py /some/where/hosting_app /home/my/models/ /home/foo/bar/model_zoo_test/ -``` - -All tests are running in sequential order. \ No newline at end of file diff --git a/onnxruntime/test/server/integration_tests/README.MD b/onnxruntime/test/server/integration_tests/README.MD new file mode 100644 index 0000000000000..141d531d1a9bc --- /dev/null +++ b/onnxruntime/test/server/integration_tests/README.MD @@ -0,0 +1,43 @@ +# ONNX Runtime Server Integration Tests + +## Preparation + +Tests validation depends on protobuf generated *_pb2.py. So we need to have a successful server application build to have it generated in the build folder under `server_test` subfolder. The following instructions assume you are in the folder. Otherwise, tests will fail due to `ModuleNotFoundError`. + +## Functional Tests + +Functional test will be run when building with `--build_server --enable_server_tests`. To run it separately, here is the command line: + +```Bash +/usr/bin/python3 ./test_main.py +``` + +## Model Zoo Tests + +To run this set of tests, a prepared test data set needs to be downloaded from [Azure Blob Storage](https://onnxserverdev.blob.core.windows.net/testing/server_test_data_20190422.zip) and unzipped to a folder, e.g. /home/foo/bar/model_zoo_test. It contains: + +* ONNX models from [ONNX Model Zoo](https://github.com/onnx/models) with opset 7/8/9. +* HTTP request json and protobuf files +* Expected response json and protobuf files + +If you only need the request and response data. 
Here is the [link](https://onnxserverdev.blob.core.windows.net/testing/server_test_data_req_resp_only.zip) to download. + +To run the full model zoo tests, here is the command line: + +```Bash +/usr/bin/python3 ./model_zoo_tests.py +``` + +For example: + +```Bash +/usr/bin/python3 ./model_zoo_tests.py /some/where/server_app /home/foo/bar/model_zoo_test /home/foo/bar/model_zoo_test +``` + +If those models are in a different folder but in the same structure as the test data, you could also do + +```Bash +/usr/bin/python3 ./model_zoo_tests.py /some/where/server_app /home/my/models/ /home/foo/bar/model_zoo_test/ +``` + +All tests run in sequential order. \ No newline at end of file diff --git a/onnxruntime/test/hosting/integration_tests/function_tests.py b/onnxruntime/test/server/integration_tests/function_tests.py similarity index 88% rename from onnxruntime/test/hosting/integration_tests/function_tests.py rename to onnxruntime/test/server/integration_tests/function_tests.py index 76433cbfcb6f3..aabeb8bfc1ceb 100644 --- a/onnxruntime/test/hosting/integration_tests/function_tests.py +++ b/onnxruntime/test/server/integration_tests/function_tests.py @@ -17,28 +17,28 @@ class HttpJsonPayloadTests(unittest.TestCase): server_ip = '127.0.0.1' server_port = 54321 url_pattern = 'http://{0}:{1}/v1/models/{2}/versions/{3}:predict' - hosting_app_path = '' + server_app_path = '' test_data_path = '' model_path = '' log_level = 'verbose' - hosting_app_proc = None + server_app_proc = None wait_server_ready_in_seconds = 1 @classmethod def setUpClass(cls): - cmd = [cls.hosting_app_path, '--http_port', str(cls.server_port), '--model_path', os.path.join(cls.model_path, 'mnist.onnx'), '--log_level', cls.log_level] - print('Launching hosting app: [{0}]'.format(' '.join(cmd))) - cls.hosting_app_proc = subprocess.Popen(cmd) - print('Hosting app PID: {0}'.format(cls.hosting_app_proc.pid)) + cmd = [cls.server_app_path, '--http_port', str(cls.server_port), '--model_path', 
os.path.join(cls.model_path, 'mnist.onnx'), '--log_level', cls.log_level] + print('Launching server app: [{0}]'.format(' '.join(cmd))) + cls.server_app_proc = subprocess.Popen(cmd) + print('Server app PID: {0}'.format(cls.server_app_proc.pid)) print('Sleep {0} second(s) to wait for server initialization'.format(cls.wait_server_ready_in_seconds)) time.sleep(cls.wait_server_ready_in_seconds) @classmethod def tearDownClass(cls): - print('Shutdown hosting app') - cls.hosting_app_proc.kill() - print('PID {0} has been killed: {1}'.format(cls.hosting_app_proc.pid, test_util.is_process_killed(cls.hosting_app_proc.pid))) + print('Shutdown server app') + cls.server_app_proc.kill() + print('PID {0} has been killed: {1}'.format(cls.server_app_proc.pid, test_util.is_process_killed(cls.server_app_proc.pid))) def test_mnist_happy_path(self): @@ -193,28 +193,28 @@ class HttpProtobufPayloadTests(unittest.TestCase): server_ip = '127.0.0.1' server_port = 54321 url_pattern = 'http://{0}:{1}/v1/models/{2}/versions/{3}:predict' - hosting_app_path = '' + server_app_path = '' test_data_path = '' model_path = '' log_level = 'verbose' - hosting_app_proc = None + server_app_proc = None wait_server_ready_in_seconds = 1 @classmethod def setUpClass(cls): - cmd = [cls.hosting_app_path, '--http_port', str(cls.server_port), '--model_path', os.path.join(cls.model_path, 'mnist.onnx'), '--log_level', cls.log_level] - print('Launching hosting app: [{0}]'.format(' '.join(cmd))) - cls.hosting_app_proc = subprocess.Popen(cmd) - print('Hosting app PID: {0}'.format(cls.hosting_app_proc.pid)) + cmd = [cls.server_app_path, '--http_port', str(cls.server_port), '--model_path', os.path.join(cls.model_path, 'mnist.onnx'), '--log_level', cls.log_level] + print('Launching server app: [{0}]'.format(' '.join(cmd))) + cls.server_app_proc = subprocess.Popen(cmd) + print('Server app PID: {0}'.format(cls.server_app_proc.pid)) print('Sleep {0} second(s) to wait for server 
initialization'.format(cls.wait_server_ready_in_seconds)) time.sleep(cls.wait_server_ready_in_seconds) @classmethod def tearDownClass(cls): - print('Shutdown hosting app') - cls.hosting_app_proc.kill() - print('PID {0} has been killed: {1}'.format(cls.hosting_app_proc.pid, test_util.is_process_killed(cls.hosting_app_proc.pid))) + print('Shutdown server app') + cls.server_app_proc.kill() + print('PID {0} has been killed: {1}'.format(cls.server_app_proc.pid, test_util.is_process_killed(cls.server_app_proc.pid))) def test_mnist_happy_path(self): @@ -321,28 +321,28 @@ def test_any_accept_header(self): class HttpEndpointTests(unittest.TestCase): server_ip = '127.0.0.1' server_port = 54321 - hosting_app_path = '' + server_app_path = '' test_data_path = '' model_path = '' log_level = 'verbose' - hosting_app_proc = None + server_app_proc = None wait_server_ready_in_seconds = 1 @classmethod def setUpClass(cls): - cmd = [cls.hosting_app_path, '--http_port', str(cls.server_port), '--model_path', os.path.join(cls.model_path, 'mnist.onnx'), '--log_level', cls.log_level] - print('Launching hosting app: [{0}]'.format(' '.join(cmd))) - cls.hosting_app_proc = subprocess.Popen(cmd) - print('Hosting app PID: {0}'.format(cls.hosting_app_proc.pid)) + cmd = [cls.server_app_path, '--http_port', str(cls.server_port), '--model_path', os.path.join(cls.model_path, 'mnist.onnx'), '--log_level', cls.log_level] + print('Launching server app: [{0}]'.format(' '.join(cmd))) + cls.server_app_proc = subprocess.Popen(cmd) + print('Server app PID: {0}'.format(cls.server_app_proc.pid)) print('Sleep {0} second(s) to wait for server initialization'.format(cls.wait_server_ready_in_seconds)) time.sleep(cls.wait_server_ready_in_seconds) @classmethod def tearDownClass(cls): - print('Shutdown hosting app') - cls.hosting_app_proc.kill() - print('PID {0} has been killed: {1}'.format(cls.hosting_app_proc.pid, test_util.is_process_killed(cls.hosting_app_proc.pid))) + print('Shutdown server app') + 
cls.server_app_proc.kill() + print('PID {0} has been killed: {1}'.format(cls.server_app_proc.pid, test_util.is_process_killed(cls.server_app_proc.pid))) def test_health_endpoint(self): diff --git a/onnxruntime/test/hosting/integration_tests/model_zoo_data_prep.py b/onnxruntime/test/server/integration_tests/model_zoo_data_prep.py similarity index 100% rename from onnxruntime/test/hosting/integration_tests/model_zoo_data_prep.py rename to onnxruntime/test/server/integration_tests/model_zoo_data_prep.py diff --git a/onnxruntime/test/hosting/integration_tests/model_zoo_tests.py b/onnxruntime/test/server/integration_tests/model_zoo_tests.py similarity index 88% rename from onnxruntime/test/hosting/integration_tests/model_zoo_tests.py rename to onnxruntime/test/server/integration_tests/model_zoo_tests.py index 95f2e0e9a7438..c47a9c50b030d 100644 --- a/onnxruntime/test/hosting/integration_tests/model_zoo_tests.py +++ b/onnxruntime/test/server/integration_tests/model_zoo_tests.py @@ -11,7 +11,7 @@ class ModelZooTests(unittest.TestCase): server_ip = '127.0.0.1' server_port = 54321 url_pattern = 'http://{0}:{1}/v1/models/{2}/versions/{3}:predict' - hosting_app_path = '' # Required + server_app_path = '' # Required log_level = 'verbose' server_ready_in_seconds = 10 server_off_in_seconds = 100 @@ -55,11 +55,11 @@ def test_models_from_model_zoo(self): self.server_port = random.randint(30000, 40000) for model_path, data_paths in model_data_map.items(): - hosting_app_proc = None + server_app_proc = None try: - cmd = [self.hosting_app_path, '--http_port', str(self.server_port), '--model_path', os.path.join(model_path, 'model.onnx'), '--log_level', self.log_level] + cmd = [self.server_app_path, '--http_port', str(self.server_port), '--model_path', os.path.join(model_path, 'model.onnx'), '--log_level', self.log_level] test_util.test_log(cmd) - hosting_app_proc = test_util.launch_hosting_app(cmd, self.server_ip, self.server_port, self.server_ready_in_seconds) + server_app_proc = 
test_util.launch_server_app(cmd, self.server_ip, self.server_port, self.server_ready_in_seconds) test_util.test_log('[{0}] Run tests...'.format(model_path)) for test in data_paths: @@ -79,7 +79,7 @@ def test_models_from_model_zoo(self): resp = test_util.make_http_request(url, pb_request_headers, request_payload) test_util.pb_response_validation(self, resp, os.path.join(test, 'response.pb')) finally: - test_util.shutdown_hosting_app(hosting_app_proc, self.server_off_in_seconds) + test_util.shutdown_server_app(server_app_proc, self.server_off_in_seconds) if __name__ == '__main__': @@ -89,7 +89,7 @@ def test_models_from_model_zoo(self): test_suites = [] for tests in test_classes: - tests.hosting_app_path = sys.argv[1] + tests.server_app_path = sys.argv[1] tests.model_zoo_model_path = sys.argv[2] tests.model_zoo_test_data_path = sys.argv[3] diff --git a/onnxruntime/test/hosting/integration_tests/test_main.py b/onnxruntime/test/server/integration_tests/test_main.py similarity index 94% rename from onnxruntime/test/hosting/integration_tests/test_main.py rename to onnxruntime/test/server/integration_tests/test_main.py index 5e34768a3f8ef..83ff585fb5f78 100644 --- a/onnxruntime/test/hosting/integration_tests/test_main.py +++ b/onnxruntime/test/server/integration_tests/test_main.py @@ -13,7 +13,7 @@ test_suites = [] for tests in test_classes: - tests.hosting_app_path = sys.argv[1] + tests.server_app_path = sys.argv[1] tests.model_path = sys.argv[2] tests.test_data_path = sys.argv[3] tests.server_port = random.randint(30000, 50000) diff --git a/onnxruntime/test/hosting/integration_tests/test_util.py b/onnxruntime/test/server/integration_tests/test_util.py similarity index 87% rename from onnxruntime/test/hosting/integration_tests/test_util.py rename to onnxruntime/test/server/integration_tests/test_util.py index de1a926f41f3a..60840a873012b 100644 --- a/onnxruntime/test/hosting/integration_tests/test_util.py +++ b/onnxruntime/test/server/integration_tests/test_util.py @@ 
-70,24 +70,24 @@ def wait_service_up(server, port, timeout=1): return True -def launch_hosting_app(cmd, server_ip, server_port, wait_server_ready_in_seconds): - test_log('Launching hosting app: [{0}]'.format(' '.join(cmd))) - hosting_app_proc = subprocess.Popen(cmd) - test_log('Hosting app PID: {0}'.format(hosting_app_proc.pid)) +def launch_server_app(cmd, server_ip, server_port, wait_server_ready_in_seconds): + test_log('Launching server app: [{0}]'.format(' '.join(cmd))) + server_app_proc = subprocess.Popen(cmd) + test_log('Server app PID: {0}'.format(server_app_proc.pid)) test_log('Wait up to {0} second(s) for server initialization'.format(wait_server_ready_in_seconds)) wait_service_up(server_ip, server_port, wait_server_ready_in_seconds) - return hosting_app_proc + return server_app_proc -def shutdown_hosting_app(hosting_app_proc, wait_for_server_off_in_seconds): - if hosting_app_proc is not None: - test_log('Shutdown hosting app') - hosting_app_proc.kill() +def shutdown_server_app(server_app_proc, wait_for_server_off_in_seconds): + if server_app_proc is not None: + test_log('Shutdown server app') + server_app_proc.kill() - while not is_process_killed(hosting_app_proc.pid): - hosting_app_proc.wait(timeout=wait_for_server_off_in_seconds) - test_log('PID {0} has been killed: {1}'.format(hosting_app_proc.pid, is_process_killed(hosting_app_proc.pid))) + while not is_process_killed(server_app_proc.pid): + server_app_proc.wait(timeout=wait_for_server_off_in_seconds) + test_log('PID {0} has been killed: {1}'.format(server_app_proc.pid, is_process_killed(server_app_proc.pid))) # Additional sleep to make sure the resource has been freed. 
time.sleep(1) diff --git a/onnxruntime/test/hosting/unit_tests/converter_tests.cc b/onnxruntime/test/server/unit_tests/converter_tests.cc similarity index 88% rename from onnxruntime/test/hosting/unit_tests/converter_tests.cc rename to onnxruntime/test/server/unit_tests/converter_tests.cc index a470cc12e6258..e5f2d9642f2f6 100644 --- a/onnxruntime/test/hosting/unit_tests/converter_tests.cc +++ b/onnxruntime/test/server/unit_tests/converter_tests.cc @@ -8,10 +8,10 @@ #include "core/framework/allocatormgr.h" #include "test/framework/test_utils.h" #include "test/test_environment.h" -#include "hosting/converter.h" +#include "server/converter.h" namespace onnxruntime { -namespace hosting { +namespace server { namespace test { void CreateMLValueBool(AllocatorPtr alloc, const std::vector& dims, const bool* value, MLValue* p_mlvalue); @@ -26,63 +26,63 @@ TEST(MLDataTypeToTensorProtoDataTypeTests, MLDataTypeToTensorProtoDataTypeTests) auto logger = std::make_unique(::onnxruntime::test::DefaultLoggingManager().DefaultLogger()); MLDataType ml_data_type = DataTypeImpl::GetType(); - onnx::TensorProto_DataType result = onnxruntime::hosting::MLDataTypeToTensorProtoDataType(ml_data_type); + onnx::TensorProto_DataType result = onnxruntime::server::MLDataTypeToTensorProtoDataType(ml_data_type); EXPECT_EQ(result, onnx::TensorProto_DataType_FLOAT); ml_data_type = DataTypeImpl::GetType(); - result = onnxruntime::hosting::MLDataTypeToTensorProtoDataType(ml_data_type); + result = onnxruntime::server::MLDataTypeToTensorProtoDataType(ml_data_type); EXPECT_EQ(result, onnx::TensorProto_DataType_FLOAT16); ml_data_type = DataTypeImpl::GetType(); - result = onnxruntime::hosting::MLDataTypeToTensorProtoDataType(ml_data_type); + result = onnxruntime::server::MLDataTypeToTensorProtoDataType(ml_data_type); EXPECT_EQ(result, onnx::TensorProto_DataType_BFLOAT16); ml_data_type = DataTypeImpl::GetType(); - result = onnxruntime::hosting::MLDataTypeToTensorProtoDataType(ml_data_type); + result = 
onnxruntime::server::MLDataTypeToTensorProtoDataType(ml_data_type); EXPECT_EQ(result, onnx::TensorProto_DataType_DOUBLE); ml_data_type = DataTypeImpl::GetType(); - result = onnxruntime::hosting::MLDataTypeToTensorProtoDataType(ml_data_type); + result = onnxruntime::server::MLDataTypeToTensorProtoDataType(ml_data_type); EXPECT_EQ(result, onnx::TensorProto_DataType_UINT8); ml_data_type = DataTypeImpl::GetType(); - result = onnxruntime::hosting::MLDataTypeToTensorProtoDataType(ml_data_type); + result = onnxruntime::server::MLDataTypeToTensorProtoDataType(ml_data_type); EXPECT_EQ(result, onnx::TensorProto_DataType_INT8); ml_data_type = DataTypeImpl::GetType(); - result = onnxruntime::hosting::MLDataTypeToTensorProtoDataType(ml_data_type); + result = onnxruntime::server::MLDataTypeToTensorProtoDataType(ml_data_type); EXPECT_EQ(result, onnx::TensorProto_DataType_UINT16); ml_data_type = DataTypeImpl::GetType(); - result = onnxruntime::hosting::MLDataTypeToTensorProtoDataType(ml_data_type); + result = onnxruntime::server::MLDataTypeToTensorProtoDataType(ml_data_type); EXPECT_EQ(result, onnx::TensorProto_DataType_INT16); ml_data_type = DataTypeImpl::GetType(); - result = onnxruntime::hosting::MLDataTypeToTensorProtoDataType(ml_data_type); + result = onnxruntime::server::MLDataTypeToTensorProtoDataType(ml_data_type); EXPECT_EQ(result, onnx::TensorProto_DataType_UINT32); ml_data_type = DataTypeImpl::GetType(); - result = onnxruntime::hosting::MLDataTypeToTensorProtoDataType(ml_data_type); + result = onnxruntime::server::MLDataTypeToTensorProtoDataType(ml_data_type); EXPECT_EQ(result, onnx::TensorProto_DataType_INT32); ml_data_type = DataTypeImpl::GetType(); - result = onnxruntime::hosting::MLDataTypeToTensorProtoDataType(ml_data_type); + result = onnxruntime::server::MLDataTypeToTensorProtoDataType(ml_data_type); EXPECT_EQ(result, onnx::TensorProto_DataType_UINT64); ml_data_type = DataTypeImpl::GetType(); - result = 
onnxruntime::hosting::MLDataTypeToTensorProtoDataType(ml_data_type); + result = onnxruntime::server::MLDataTypeToTensorProtoDataType(ml_data_type); EXPECT_EQ(result, onnx::TensorProto_DataType_INT64); ml_data_type = DataTypeImpl::GetType(); - result = onnxruntime::hosting::MLDataTypeToTensorProtoDataType(ml_data_type); + result = onnxruntime::server::MLDataTypeToTensorProtoDataType(ml_data_type); EXPECT_EQ(result, onnx::TensorProto_DataType_STRING); ml_data_type = DataTypeImpl::GetType(); - result = onnxruntime::hosting::MLDataTypeToTensorProtoDataType(ml_data_type); + result = onnxruntime::server::MLDataTypeToTensorProtoDataType(ml_data_type); EXPECT_EQ(result, onnx::TensorProto_DataType_BOOL); ml_data_type = DataTypeImpl::GetTensorType(); - result = onnxruntime::hosting::MLDataTypeToTensorProtoDataType(ml_data_type); + result = onnxruntime::server::MLDataTypeToTensorProtoDataType(ml_data_type); EXPECT_EQ(result, onnx::TensorProto_DataType_UNDEFINED); } @@ -95,7 +95,7 @@ TEST(MLValueToTensorProtoTests, FloatToRaw) { onnxruntime::test::CreateMLValue(TestCPUExecutionProvider()->GetAllocator(0, OrtMemTypeDefault), dims_mul_x, values_mul_x, &ml_value); onnx::TensorProto tp; - common::Status status = onnxruntime::hosting::MLValueToTensorProto(ml_value, /* using_raw_data */ true, std::move(logger), tp); + common::Status status = onnxruntime::server::MLValueToTensorProto(ml_value, /* using_raw_data */ true, std::move(logger), tp); EXPECT_TRUE(status.IsOK()); // Verify data type @@ -134,7 +134,7 @@ TEST(MLValueToTensorProtoTests, FloatToFloatData) { onnxruntime::test::CreateMLValue(TestCPUExecutionProvider()->GetAllocator(0, OrtMemTypeDefault), dims_mul_x, values_mul_x, &ml_value); onnx::TensorProto tp; - common::Status status = onnxruntime::hosting::MLValueToTensorProto(ml_value, /* using_raw_data */ false, std::move(logger), tp); + common::Status status = onnxruntime::server::MLValueToTensorProto(ml_value, /* using_raw_data */ false, std::move(logger), tp); 
EXPECT_TRUE(status.IsOK()); // Verify data type @@ -169,7 +169,7 @@ TEST(MLValueToTensorProtoTests, Int32ToRaw) { onnxruntime::test::CreateMLValue(TestCPUExecutionProvider()->GetAllocator(0, OrtMemTypeDefault), dims_mul_x, values_mul_x, &ml_value); onnx::TensorProto tp; - common::Status status = onnxruntime::hosting::MLValueToTensorProto(ml_value, /* using_raw_data */ true, std::move(logger), tp); + common::Status status = onnxruntime::server::MLValueToTensorProto(ml_value, /* using_raw_data */ true, std::move(logger), tp); EXPECT_TRUE(status.IsOK()); // Verify data type @@ -208,7 +208,7 @@ TEST(MLValueToTensorProtoTests, Int32ToInt32Data) { onnxruntime::test::CreateMLValue(TestCPUExecutionProvider()->GetAllocator(0, OrtMemTypeDefault), dims_mul_x, values_mul_x, &ml_value); onnx::TensorProto tp; - common::Status status = onnxruntime::hosting::MLValueToTensorProto(ml_value, /* using_raw_data */ false, std::move(logger), tp); + common::Status status = onnxruntime::server::MLValueToTensorProto(ml_value, /* using_raw_data */ false, std::move(logger), tp); EXPECT_TRUE(status.IsOK()); // Verify data type @@ -243,7 +243,7 @@ TEST(MLValueToTensorProtoTests, UInt8ToRaw) { onnxruntime::test::CreateMLValue(TestCPUExecutionProvider()->GetAllocator(0, OrtMemTypeDefault), dims_mul_x, values_mul_x, &ml_value); onnx::TensorProto tp; - common::Status status = onnxruntime::hosting::MLValueToTensorProto(ml_value, /* using_raw_data */ true, std::move(logger), tp); + common::Status status = onnxruntime::server::MLValueToTensorProto(ml_value, /* using_raw_data */ true, std::move(logger), tp); EXPECT_TRUE(status.IsOK()); // Verify data type @@ -282,7 +282,7 @@ TEST(MLValueToTensorProtoTests, UInt8ToInt32Data) { onnxruntime::test::CreateMLValue(TestCPUExecutionProvider()->GetAllocator(0, OrtMemTypeDefault), dims_mul_x, values_mul_x, &ml_value); onnx::TensorProto tp; - common::Status status = onnxruntime::hosting::MLValueToTensorProto(ml_value, /* using_raw_data */ false, 
std::move(logger), tp); + common::Status status = onnxruntime::server::MLValueToTensorProto(ml_value, /* using_raw_data */ false, std::move(logger), tp); EXPECT_TRUE(status.IsOK()); // Verify data type @@ -319,7 +319,7 @@ TEST(MLValueToTensorProtoTests, Int8ToRaw) { onnxruntime::test::CreateMLValue(TestCPUExecutionProvider()->GetAllocator(0, OrtMemTypeDefault), dims_mul_x, values_mul_x, &ml_value); onnx::TensorProto tp; - common::Status status = onnxruntime::hosting::MLValueToTensorProto(ml_value, /* using_raw_data */ true, std::move(logger), tp); + common::Status status = onnxruntime::server::MLValueToTensorProto(ml_value, /* using_raw_data */ true, std::move(logger), tp); EXPECT_TRUE(status.IsOK()); // Verify data type @@ -358,7 +358,7 @@ TEST(MLValueToTensorProtoTests, Int8ToInt32Data) { onnxruntime::test::CreateMLValue(TestCPUExecutionProvider()->GetAllocator(0, OrtMemTypeDefault), dims_mul_x, values_mul_x, &ml_value); onnx::TensorProto tp; - common::Status status = onnxruntime::hosting::MLValueToTensorProto(ml_value, /* using_raw_data */ false, std::move(logger), tp); + common::Status status = onnxruntime::server::MLValueToTensorProto(ml_value, /* using_raw_data */ false, std::move(logger), tp); EXPECT_TRUE(status.IsOK()); // Verify data type @@ -395,7 +395,7 @@ TEST(MLValueToTensorProtoTests, UInt16ToRaw) { onnxruntime::test::CreateMLValue(TestCPUExecutionProvider()->GetAllocator(0, OrtMemTypeDefault), dims_mul_x, values_mul_x, &ml_value); onnx::TensorProto tp; - common::Status status = onnxruntime::hosting::MLValueToTensorProto(ml_value, /* using_raw_data */ true, std::move(logger), tp); + common::Status status = onnxruntime::server::MLValueToTensorProto(ml_value, /* using_raw_data */ true, std::move(logger), tp); EXPECT_TRUE(status.IsOK()); // Verify data type @@ -434,7 +434,7 @@ TEST(MLValueToTensorProtoTests, UInt16ToInt32Data) { onnxruntime::test::CreateMLValue(TestCPUExecutionProvider()->GetAllocator(0, OrtMemTypeDefault), dims_mul_x, values_mul_x, 
&ml_value); onnx::TensorProto tp; - common::Status status = onnxruntime::hosting::MLValueToTensorProto(ml_value, /* using_raw_data */ false, std::move(logger), tp); + common::Status status = onnxruntime::server::MLValueToTensorProto(ml_value, /* using_raw_data */ false, std::move(logger), tp); EXPECT_TRUE(status.IsOK()); // Verify data type @@ -471,7 +471,7 @@ TEST(MLValueToTensorProtoTests, Int16ToRaw) { onnxruntime::test::CreateMLValue(TestCPUExecutionProvider()->GetAllocator(0, OrtMemTypeDefault), dims_mul_x, values_mul_x, &ml_value); onnx::TensorProto tp; - common::Status status = onnxruntime::hosting::MLValueToTensorProto(ml_value, /* using_raw_data */ true, std::move(logger), tp); + common::Status status = onnxruntime::server::MLValueToTensorProto(ml_value, /* using_raw_data */ true, std::move(logger), tp); EXPECT_TRUE(status.IsOK()); // Verify data type @@ -510,7 +510,7 @@ TEST(MLValueToTensorProtoTests, Int16ToInt32Data) { onnxruntime::test::CreateMLValue(TestCPUExecutionProvider()->GetAllocator(0, OrtMemTypeDefault), dims_mul_x, values_mul_x, &ml_value); onnx::TensorProto tp; - common::Status status = onnxruntime::hosting::MLValueToTensorProto(ml_value, /* using_raw_data */ false, std::move(logger), tp); + common::Status status = onnxruntime::server::MLValueToTensorProto(ml_value, /* using_raw_data */ false, std::move(logger), tp); EXPECT_TRUE(status.IsOK()); // Verify data type @@ -547,7 +547,7 @@ TEST(MLValueToTensorProtoTests, BoolToRaw) { CreateMLValueBool(TestCPUExecutionProvider()->GetAllocator(0, OrtMemTypeDefault), dims_mul_x, values_mul_x, &ml_value); onnx::TensorProto tp; - common::Status status = onnxruntime::hosting::MLValueToTensorProto(ml_value, /* using_raw_data */ true, std::move(logger), tp); + common::Status status = onnxruntime::server::MLValueToTensorProto(ml_value, /* using_raw_data */ true, std::move(logger), tp); EXPECT_TRUE(status.IsOK()); // Verify data type @@ -586,7 +586,7 @@ TEST(MLValueToTensorProtoTests, BoolToInt32Data) { 
CreateMLValueBool(TestCPUExecutionProvider()->GetAllocator(0, OrtMemTypeDefault), dims_mul_x, values_mul_x, &ml_value); onnx::TensorProto tp; - common::Status status = onnxruntime::hosting::MLValueToTensorProto(ml_value, /* using_raw_data */ false, std::move(logger), tp); + common::Status status = onnxruntime::server::MLValueToTensorProto(ml_value, /* using_raw_data */ false, std::move(logger), tp); EXPECT_TRUE(status.IsOK()); // Verify data type @@ -629,7 +629,7 @@ TEST(MLValueToTensorProtoTests, Float16ToRaw) { onnxruntime::test::CreateMLValue(TestCPUExecutionProvider()->GetAllocator(0, OrtMemTypeDefault), dims_mul_x, values_mul_x, &ml_value); onnx::TensorProto tp; - common::Status status = onnxruntime::hosting::MLValueToTensorProto(ml_value, /* using_raw_data */ true, std::move(logger), tp); + common::Status status = onnxruntime::server::MLValueToTensorProto(ml_value, /* using_raw_data */ true, std::move(logger), tp); EXPECT_TRUE(status.IsOK()); // Verify data type @@ -674,7 +674,7 @@ TEST(MLValueToTensorProtoTests, FloatToInt32Data) { onnxruntime::test::CreateMLValue(TestCPUExecutionProvider()->GetAllocator(0, OrtMemTypeDefault), dims_mul_x, values_mul_x, &ml_value); onnx::TensorProto tp; - common::Status status = onnxruntime::hosting::MLValueToTensorProto(ml_value, /* using_raw_data */ false, std::move(logger), tp); + common::Status status = onnxruntime::server::MLValueToTensorProto(ml_value, /* using_raw_data */ false, std::move(logger), tp); EXPECT_TRUE(status.IsOK()); // Verify data type @@ -717,7 +717,7 @@ TEST(MLValueToTensorProtoTests, BFloat16ToRaw) { onnxruntime::test::CreateMLValue(TestCPUExecutionProvider()->GetAllocator(0, OrtMemTypeDefault), dims_mul_x, values_mul_x, &ml_value); onnx::TensorProto tp; - common::Status status = onnxruntime::hosting::MLValueToTensorProto(ml_value, /* using_raw_data */ true, std::move(logger), tp); + common::Status status = onnxruntime::server::MLValueToTensorProto(ml_value, /* using_raw_data */ true, 
std::move(logger), tp); EXPECT_TRUE(status.IsOK()); // Verify data type @@ -762,7 +762,7 @@ TEST(MLValueToTensorProtoTests, BFloatToInt32Data) { onnxruntime::test::CreateMLValue(TestCPUExecutionProvider()->GetAllocator(0, OrtMemTypeDefault), dims_mul_x, values_mul_x, &ml_value); onnx::TensorProto tp; - common::Status status = onnxruntime::hosting::MLValueToTensorProto(ml_value, /* using_raw_data */ false, std::move(logger), tp); + common::Status status = onnxruntime::server::MLValueToTensorProto(ml_value, /* using_raw_data */ false, std::move(logger), tp); EXPECT_TRUE(status.IsOK()); // Verify data type @@ -805,7 +805,7 @@ TEST(MLValueToTensorProtoTests, StringToStringData) { } onnx::TensorProto tp; - common::Status status = onnxruntime::hosting::MLValueToTensorProto(ml_value, /* using_raw_data */ false, std::move(logger), tp); + common::Status status = onnxruntime::server::MLValueToTensorProto(ml_value, /* using_raw_data */ false, std::move(logger), tp); EXPECT_TRUE(status.IsOK()); // Verify data type @@ -841,7 +841,7 @@ TEST(MLValueToTensorProtoTests, Int64ToRaw) { onnxruntime::test::CreateMLValue(TestCPUExecutionProvider()->GetAllocator(0, OrtMemTypeDefault), dims_mul_x, values_mul_x, &ml_value); onnx::TensorProto tp; - common::Status status = onnxruntime::hosting::MLValueToTensorProto(ml_value, /* using_raw_data */ true, std::move(logger), tp); + common::Status status = onnxruntime::server::MLValueToTensorProto(ml_value, /* using_raw_data */ true, std::move(logger), tp); EXPECT_TRUE(status.IsOK()); // Verify data type @@ -880,7 +880,7 @@ TEST(MLValueToTensorProtoTests, Int64ToInt64Data) { onnxruntime::test::CreateMLValue(TestCPUExecutionProvider()->GetAllocator(0, OrtMemTypeDefault), dims_mul_x, values_mul_x, &ml_value); onnx::TensorProto tp; - common::Status status = onnxruntime::hosting::MLValueToTensorProto(ml_value, /* using_raw_data */ false, std::move(logger), tp); + common::Status status = onnxruntime::server::MLValueToTensorProto(ml_value, /* 
using_raw_data */ false, std::move(logger), tp); EXPECT_TRUE(status.IsOK()); // Verify data type @@ -915,7 +915,7 @@ TEST(MLValueToTensorProtoTests, UInt32ToRaw) { onnxruntime::test::CreateMLValue(TestCPUExecutionProvider()->GetAllocator(0, OrtMemTypeDefault), dims_mul_x, values_mul_x, &ml_value); onnx::TensorProto tp; - common::Status status = onnxruntime::hosting::MLValueToTensorProto(ml_value, /* using_raw_data */ true, std::move(logger), tp); + common::Status status = onnxruntime::server::MLValueToTensorProto(ml_value, /* using_raw_data */ true, std::move(logger), tp); EXPECT_TRUE(status.IsOK()); // Verify data type @@ -954,7 +954,7 @@ TEST(MLValueToTensorProtoTests, UInt32ToUint64Data) { onnxruntime::test::CreateMLValue(TestCPUExecutionProvider()->GetAllocator(0, OrtMemTypeDefault), dims_mul_x, values_mul_x, &ml_value); onnx::TensorProto tp; - common::Status status = onnxruntime::hosting::MLValueToTensorProto(ml_value, /* using_raw_data */ false, std::move(logger), tp); + common::Status status = onnxruntime::server::MLValueToTensorProto(ml_value, /* using_raw_data */ false, std::move(logger), tp); EXPECT_TRUE(status.IsOK()); // Verify data type @@ -992,7 +992,7 @@ TEST(MLValueToTensorProtoTests, UInt64ToRaw) { onnxruntime::test::CreateMLValue(TestCPUExecutionProvider()->GetAllocator(0, OrtMemTypeDefault), dims_mul_x, values_mul_x, &ml_value); onnx::TensorProto tp; - common::Status status = onnxruntime::hosting::MLValueToTensorProto(ml_value, /* using_raw_data */ true, std::move(logger), tp); + common::Status status = onnxruntime::server::MLValueToTensorProto(ml_value, /* using_raw_data */ true, std::move(logger), tp); EXPECT_TRUE(status.IsOK()); // Verify data type @@ -1031,7 +1031,7 @@ TEST(MLValueToTensorProtoTests, UInt64ToInt64Data) { onnxruntime::test::CreateMLValue(TestCPUExecutionProvider()->GetAllocator(0, OrtMemTypeDefault), dims_mul_x, values_mul_x, &ml_value); onnx::TensorProto tp; - common::Status status = 
onnxruntime::hosting::MLValueToTensorProto(ml_value, /* using_raw_data */ false, std::move(logger), tp); + common::Status status = onnxruntime::server::MLValueToTensorProto(ml_value, /* using_raw_data */ false, std::move(logger), tp); EXPECT_TRUE(status.IsOK()); // Verify data type @@ -1066,7 +1066,7 @@ TEST(MLValueToTensorProtoTests, DoubleToRaw) { onnxruntime::test::CreateMLValue(TestCPUExecutionProvider()->GetAllocator(0, OrtMemTypeDefault), dims_mul_x, values_mul_x, &ml_value); onnx::TensorProto tp; - common::Status status = onnxruntime::hosting::MLValueToTensorProto(ml_value, /* using_raw_data */ true, std::move(logger), tp); + common::Status status = onnxruntime::server::MLValueToTensorProto(ml_value, /* using_raw_data */ true, std::move(logger), tp); EXPECT_TRUE(status.IsOK()); // Verify data type @@ -1105,7 +1105,7 @@ TEST(MLValueToTensorProtoTests, DoubleToInt64Data) { onnxruntime::test::CreateMLValue(TestCPUExecutionProvider()->GetAllocator(0, OrtMemTypeDefault), dims_mul_x, values_mul_x, &ml_value); onnx::TensorProto tp; - common::Status status = onnxruntime::hosting::MLValueToTensorProto(ml_value, /* using_raw_data */ false, std::move(logger), tp); + common::Status status = onnxruntime::server::MLValueToTensorProto(ml_value, /* using_raw_data */ false, std::move(logger), tp); EXPECT_TRUE(status.IsOK()); // Verify data type @@ -1147,5 +1147,5 @@ void CreateMLValueBool(AllocatorPtr alloc, } } // namespace test -} // namespace hosting +} // namespace server } // namespace onnxruntime \ No newline at end of file diff --git a/onnxruntime/test/hosting/unit_tests/http_routes_tests.cc b/onnxruntime/test/server/unit_tests/http_routes_tests.cc similarity index 98% rename from onnxruntime/test/hosting/unit_tests/http_routes_tests.cc rename to onnxruntime/test/server/unit_tests/http_routes_tests.cc index 9acc32adce9c9..dbb955050165c 100644 --- a/onnxruntime/test/hosting/unit_tests/http_routes_tests.cc +++ b/onnxruntime/test/server/unit_tests/http_routes_tests.cc @@ 
-4,10 +4,10 @@ #include #include "gtest/gtest.h" -#include "hosting/http/core/routes.h" +#include "server/http/core/routes.h" namespace onnxruntime { -namespace hosting { +namespace server { namespace test { using test_data = std::tuple; @@ -105,5 +105,5 @@ void run_route(const std::string& pattern, http::verb method, const std::vector< } } // namespace test -} // namespace hosting +} // namespace server } // namespace onnxruntime diff --git a/onnxruntime/test/hosting/unit_tests/json_handling_tests.cc b/onnxruntime/test/server/unit_tests/json_handling_tests.cc similarity index 93% rename from onnxruntime/test/hosting/unit_tests/json_handling_tests.cc rename to onnxruntime/test/server/unit_tests/json_handling_tests.cc index 3bb62a3945eb7..0bd2d16e45795 100644 --- a/onnxruntime/test/hosting/unit_tests/json_handling_tests.cc +++ b/onnxruntime/test/server/unit_tests/json_handling_tests.cc @@ -7,33 +7,33 @@ #include "gtest/gtest.h" #include "predict.pb.h" -#include "hosting/http/json_handling.h" +#include "server/http/json_handling.h" namespace onnxruntime { -namespace hosting { +namespace server { namespace test { namespace protobufutil = google::protobuf::util; TEST(JsonDeserializationTests, HappyPath) { std::string input_json = 
R"({"inputs":{"Input3":{"dims":["1","1","28","28"],"dataType":1,"rawData":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAPwAAQEAAAAAAAAAAAAAAgEAAAABAAAAAAAAAMEEAAAAAAAAAAAAAYEEAAIA/AAAAAAAAmEEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEEAAAAAAAAAAAAA4EAAAAAAAACAPwAAIEEAAAAAAAAAQAAAAEAAAIBBAAAAAAAAQEAAAEBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA4EAAAABBAAAAAAAAAEEAAAAAAAAAAAAAAEEAAAAAAAAAAAAAmEEAAAAAAAAAAAAAgD8AAKhBAAAAAAAAgEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgD8AAAAAAAAAAAAAgD8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAMEEAAAAAAAAAAAAAIEEAAEBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABQQQAAAAAAAHBBAAAgQQAA0EEAAAhCAACIQQAAmkIAADVDAAAyQwAADEIAAIBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAWQwAAfkMAAHpDAAB7QwAAc0MAAHxDAAB8QwAAf0MAADRCAADAQAAAAAAAAKBAAAAAAAAAEEEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOBAAACQQgAATUMAAH9DAABuQwAAc0MAAH9DAAB+QwAAe0MAAHhDAABJQwAARkMAAGRCAAAAAAAAmEEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAWkMAAH9DAABxQwAAf0MAAHlDAAB6QwAAe0MAAHpDAAB/QwAAf0MAAHJDAABgQwAAREIAAAAAAABAQQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgD8AAABAAABAQAAAAEAAAABAAACAPwAAAAAAAIJCAABkQwAAf0MAAH5DAAB0QwAA7kIAAAhCAAAkQgAA3EIAAHpDAAB/QwAAeEMAAPhCAACgQQAAAAAAAAAAAAAAAAAAAAAAAAAAAACAPwAAgD8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEBBAAAAAAAAeEIAAM5CAADiQgAA6kIAAAhCAAAAAAAAAAAAAAAAAABIQwAAdEMAAH9DAAB/QwAAAAAAAEBBAAAAAAAAAAAAAAAAAAAAAAAAAEAAAIA/AAAAAAAAAAAAAAAAAAAAAAAAgD8AAABAAAAAAAAAAAAAAABAAACAQAAAAAAAADBBAAAAAAAA4EAAAMBAAAAAAAAAlkIAAHRDAAB/QwAAf0MAAIBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIA/AAAAQAAAQEAAAIBAAACAQAAAAAAAAGBBAAAAAAAAAAAAAAAAAAAQQQAAAAAAAABAAAAAAAAAAAAAAAhCAAB/QwAAf0MAAH1DAAAgQQAAIEEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIA/AAAAQAAAQEAAAABAAAAAAAAAAAAAAEBAAAAAQAAAAAAAAFBBAAAwQQAAAAAAAAAAAAAAAAAAwEAAAEBBAADGQgAAf0MAAH5DAAB4QwAAcEEAAEBBAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAIA/AACAPwAAgD8AAAAAAAAAAAAAAAAAAAAAAACAPwAAgD8AAAAAAAAAAAAAoEAAAMBAAAAwQQAAAAAAAAAAAACIQQAAOEMAAHdDAAB/QwAAc0MAAFBBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAABAAACAQAAAgEAAAAAAAAAwQQAAAAAAAExCAAC8QgAAqkIAAKBAAACgQAAAyEEAAHZDAAB2QwAAf0MAAFBDAAAAAAAAEEEAAAAAAAAAAAAAAAAAAAAAAACAQAAAgD8AAAAAAAAAAAAAgD8AAOBAAABwQQAAmEEAAMZCAADOQgAANkMAAD1DAABtQwAAfUMAAHxDAAA/QwAAPkMAAGNDAABzQwAAfEMAAFJDAACQQQAA4EAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIBAAAAAAAAAAAAAAABCAADaQgAAOUMAAHdDAAB/QwAAckMAAH9DAAB0QwAAf0MAAH9DAAByQwAAe0MAAH9DAABwQwAAf0MAAH9DAABaQwAA+EIAABBBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAD+QgAAf0MAAGtDAAB/QwAAf0MAAHdDAABlQwAAVEMAAHJDAAB6QwAAf0MAAH9DAAB4QwAAf0MAAH1DAAB5QwAAf0MAAHNDAAAqQwAAQEEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMEEAAAAAAAAQQQAAfUMAAH9DAAB/QwAAaUMAAEpDAACqQgAAAAAAAFRCAABEQwAAbkMAAH9DAABjQwAAbkMAAA5DAADaQgAAQUMAAH9DAABwQwAAf0MAADRDAAAAAAAAAAAAAAAAAAAAAAAAwEAAAAAAAACwQQAAgD8AAHVDAABzQwAAfkMAAH9DAABZQwAAa0MAAGJDAABVQwAAdEMAAHtDAAB/QwAAb0MAAJpCAAAAAAAAAAAAAKBBAAA2QwAAd0MAAG9DAABzQwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIBAAAAlQwAAe0MAAH9DAAB1QwAAf0MAAHJDAAB9QwAAekMAAH9DAABFQwAA1kIAAGxCAAAAAAAAkEEAAABAAADAQAAAAAAAAFhCAAB/QwAAHkMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAwEEAAAAAAAAAAAAAwEAAAAhCAAAnQwAAQkMAADBDAAA3QwAAJEMAADBCAAAAQAAAIEEAAMBAAADAQAAAAAAAAAAAAACgQAAAAAAAAIA/AAAAAAAAYEEAAABAAAAAAAAAAAAAAAAAAAAAAAAAIEEAAAAAAABgQQAAAAAAAEBBAAAAAAAAoEAAAAAAAACAPwAAAAAAAMBAAAAAAAAA4EAAAAAAAAAAAAAAAAAAAABBAAAAAAAAIEEAAAAAAACgQAAAAAAAAAAAAAAgQQAAAAAAAAAAAAAAAAAAAAAAAAAAAABgQQAAAAAAAIBAAAAAAAAAAAAAAMhBAAAAAAAAAAAAABBBAAAAAAAAAAAAABBBAAAAAAAAMEEAAAAAAACAPwAAAAAAAAAAAAAAQAAAAAAAAAAAAADgQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=="}},"outputFilter":["Plus214_Output_0"]})"; - onnxruntime::hosting::PredictRequest request; - protobufutil::Status status = onnxruntime::hosting::GetRequestFromJson(input_json, request); + onnxruntime::server::PredictRequest request; + protobufutil::Status status = onnxruntime::server::GetRequestFromJson(input_json, request); EXPECT_EQ(protobufutil::error::OK, status.error_code()); } TEST(JsonDeserializationTests, WithUnknownField) { std::string input_json = R"({"foo": "bar","inputs":{"Input3":{"dims":["1","1","28","28"],"dataType":1,"rawData":"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACAPwAAQEAAAAAAAAAAAAAAgEAAAABAAAAAAAAAMEEAAAAAAAAAAAAAYEEAAIA/AAAAAAAAmEEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEEAAAAAAAAAAAAA4EAAAAAAAACAPwAAIEEAAAAAAAAAQAAAAEAAAIBBAAAAAAAAQEAAAEBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA4EAAAABBAAAAAAAAAEEAAAAAAAAAAAAAAEEAAAAAAAAAAAAAmEEAAAAAAAAAAAAAgD8AAKhBAAAAAAAAgEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgD8AAAAAAAAAAAAAgD8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAMEEAAAAAAAAAAAAAIEEAAEBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABQQQAAAAAAAHBBAAAgQQAA0EEAAAhCAACIQQAAmkIAADVDAAAyQwAADEIAAIBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAWQwAAfkMAAHpDAAB7QwAAc0MAAHxDAAB8QwAAf0MAADRCAADAQAAAAAAAAKBAAAAAAAAAEEEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOBAAACQQgAATUMAAH9DAABuQwAAc0MAAH9DAAB+QwAAe0MAAHhDAABJQwAARkMAAGRCAAAAAAAAmEEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAWkMAAH9DAABxQwAAf0MAAHlDAAB6QwAAe0MAAHpDAAB/QwAAf0MAAHJDAABgQwAAREIAAAAAAABAQQAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAgD8AAABAAABAQAAAAEAAAABAAACAPwAAAAAAAIJCAABkQwAAf0MAAH5DAAB0QwAA7kIAAAhCAAAkQgAA3EIAAHpDAAB/QwAAeEMAAPhCAACgQQAAAAAAAAAAAAAAAAAAAAAAAAAAAACAPwAAgD8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEBBAAAAAAAAeEIAAM5CAADiQgAA6kIAAAhCAAAAAAAAAAAAAAAAAABIQwAAdEMAAH9DAAB/QwAAAAAAAEBBAAAAAAAAAAAAAAAAAAAAAAAAAEAAAIA/AAAAAAAAAAAAAAAAAAAAAAAAgD8AAABAAAAAAAAAAAAAAABAAACAQAAAAAAAADBBAAAAAAAA4EAAAMBAAAAAAAAAlkIAAHRDAAB/QwAAf0MAAIBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIA/AAAAQAAAQEAAAIBAAACAQAAAAAAAAGBBAAAAAAAAAAAAAAAAAAAQQQAAAAAAAABAAAAAAAAAAAAAAAhCAAB/QwAAf0MAAH1DAAAgQQAAIEEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIA/AAAAQAAAQEAAAABAAAAAAAAAAAAAAEBAAAAAQAAAAAAAAFBBAAAwQQAAAAAAAAAAAAAAAAAAwEAAAEBBAADGQgAAf0MAAH5DAAB4QwAAcEEAAEBBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIA/AACAPwAAgD8AAAAAAAAAAAAAAAAAAAAAAACAPwAAgD8AAAAAAAAAAAAAoEAAAMBAAAAwQQAAAAAAAAAAAACIQQAAOEMAAHdDAAB/QwAAc0MAAFBBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAABAAACAQAAAgEAAAAAAAAAwQQAAAAAAAExCAAC8QgAAqkIAAKBAAACgQAAAyEEAAHZDAAB2QwAAf0MAAFBDAAAAAAAAEEEAAAAAAAAAAAAAAAAAAAAAAACAQAAAgD8AAAAAAAAAAAAAgD8AAOBAAABwQQAAmEEAAMZCAADOQgAANkMAAD1DAABtQwAAfUMAAHxDAAA/QwAAPkMAAGNDAABzQwAAfEMAAFJDAACQQQAA4EAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIBAAAAAAAAAAAAAAABCAADaQgAAOUMAAHdDAAB/QwAAckMAAH9DAAB0QwAAf0MAAH9DAAByQwAAe0MAAH9DAABwQwAAf0MAAH9DAABaQwAA+EIAABBBAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAD+QgAAf0MAAGtDAAB/QwAAf0MAAHdDAABlQwAAVEMAAHJDAAB6QwAAf0MAAH9DAAB4QwAAf0MAAH1DAAB5QwAAf0MAAHNDAAAqQwAAQEEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMEEAAAAAAAAQQQAAfUMAAH9DAAB/QwAAaUMAAEpDAACqQgAAAAAAAFRCAABEQwAAbkMAAH9DAABjQwAAbkMAAA5DAADaQgAAQUMAAH9DAABwQwAAf0MAADRDAAAAAAAAAAAAAAAAAAAAAAAAwEAAAAAAAACwQQAAgD8AAHVDAABzQwAAfkMAAH9DAABZQwAAa0MAAGJDAABVQwAAdEMAAHtDAAB/QwAAb0MAAJpCAAAAAAAAAAAAAKBBAAA2QwAAd0MAAG9DAABzQwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIBAAAAlQwAAe0MAAH9DAAB1QwAAf0MAAHJDAAB9QwAAekMAAH9DAABFQwAA1kIAAGxCAAAAAAAAkEEAAABAAADAQAAAAAAAAFhCAAB/QwAAHkMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAwEEAAAAAAAAAAAAAwEAAAAhCAAAnQwAAQkMAADBDAAA3Qw
AAJEMAADBCAAAAQAAAIEEAAMBAAADAQAAAAAAAAAAAAACgQAAAAAAAAIA/AAAAAAAAYEEAAABAAAAAAAAAAAAAAAAAAAAAAAAAIEEAAAAAAABgQQAAAAAAAEBBAAAAAAAAoEAAAAAAAACAPwAAAAAAAMBAAAAAAAAA4EAAAAAAAAAAAAAAAAAAAABBAAAAAAAAIEEAAAAAAACgQAAAAAAAAAAAAAAgQQAAAAAAAAAAAAAAAAAAAAAAAAAAAABgQQAAAAAAAIBAAAAAAAAAAAAAAMhBAAAAAAAAAAAAABBBAAAAAAAAAAAAABBBAAAAAAAAMEEAAAAAAACAPwAAAAAAAAAAAAAAQAAAAAAAAAAAAADgQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA=="}},"outputFilter":["Plus214_Output_0"]})"; - onnxruntime::hosting::PredictRequest request; - protobufutil::Status status = onnxruntime::hosting::GetRequestFromJson(input_json, request); + onnxruntime::server::PredictRequest request; + protobufutil::Status status = onnxruntime::server::GetRequestFromJson(input_json, request); EXPECT_EQ(protobufutil::error::OK, status.error_code()); } TEST(JsonDeserializationTests, InvalidData) { std::string input_json = R"({"inputs":{"Input3":{"dims":["1","1","28","28"],"dataType":1,"rawData":"hello"}},"outputFilter":["Plus214_Output_0"]})"; - onnxruntime::hosting::PredictRequest request; - protobufutil::Status status = onnxruntime::hosting::GetRequestFromJson(input_json, request); + onnxruntime::server::PredictRequest request; + protobufutil::Status status = onnxruntime::server::GetRequestFromJson(input_json, request); EXPECT_EQ(protobufutil::error::INVALID_ARGUMENT, status.error_code()); EXPECT_EQ("inputs[0].value.raw_data: invalid value \"hello\" for 
type TYPE_BYTES", status.error_message()); @@ -41,8 +41,8 @@ TEST(JsonDeserializationTests, InvalidData) { TEST(JsonDeserializationTests, InvalidJson) { std::string input_json = R"({inputs":{"Input3":{"dims":["1","1","28","28"],"dataType":1,"rawData":"hello"}},"outputFilter":["Plus214_Output_0"]})"; - onnxruntime::hosting::PredictRequest request; - protobufutil::Status status = onnxruntime::hosting::GetRequestFromJson(input_json, request); + onnxruntime::server::PredictRequest request; + protobufutil::Status status = onnxruntime::server::GetRequestFromJson(input_json, request); EXPECT_EQ(protobufutil::error::INVALID_ARGUMENT, status.error_code()); std::string errmsg = status.error_message(); @@ -50,9 +50,9 @@ TEST(JsonDeserializationTests, InvalidJson) { } TEST(JsonSerializationTests, HappyPath) { - std::string test_data = "testdata/hosting/response_0.pb"; + std::string test_data = "testdata/server/response_0.pb"; std::string expected_json_string = R"({"outputs":{"Plus214_Output_0":{"dims":["1","10"],"dataType":1,"rawData":"4+pzRFWuGsSMdM1F2gEnRFdRZcRZ9NDEURj0xBIzdsJOS0LEA/GzxA=="}}})"; - onnxruntime::hosting::PredictResponse response; + onnxruntime::server::PredictResponse response; std::string json_string; std::ifstream ifs(test_data, std::ios_base::in | std::ios_base::binary); @@ -62,7 +62,7 @@ TEST(JsonSerializationTests, HappyPath) { ifs.close(); EXPECT_TRUE(succeeded) << test_data << " is invalid" << std::endl; - protobufutil::Status status = onnxruntime::hosting::GenerateResponseInJson(response, json_string); + protobufutil::Status status = onnxruntime::server::GenerateResponseInJson(response, json_string); EXPECT_EQ(protobufutil::error::OK, status.error_code()); EXPECT_EQ(expected_json_string, json_string); @@ -124,5 +124,5 @@ TEST(JsonErrorMessageTests, MessageWithManyCarriageCharacters) { } } // namespace test -} // namespace hosting +} // namespace server } // namespace onnxruntime \ No newline at end of file diff --git 
a/onnxruntime/test/hosting/unit_tests/server_configuration_test.cc b/onnxruntime/test/server/unit_tests/server_configuration_test.cc similarity index 88% rename from onnxruntime/test/hosting/unit_tests/server_configuration_test.cc rename to onnxruntime/test/server/unit_tests/server_configuration_test.cc index bc2210374902b..77a979e27f593 100644 --- a/onnxruntime/test/hosting/unit_tests/server_configuration_test.cc +++ b/onnxruntime/test/server/unit_tests/server_configuration_test.cc @@ -4,10 +4,10 @@ #include "gtest/gtest.h" #include "gmock/gmock.h" -#include "hosting/server_configuration.h" +#include "server/server_configuration.h" namespace onnxruntime { -namespace hosting { +namespace server { namespace test { TEST(ConfigParsingTests, AllArgs) { @@ -19,7 +19,7 @@ TEST(ConfigParsingTests, AllArgs) { const_cast("--num_http_threads"), const_cast("1"), const_cast("--log_level"), const_cast("info")}; - onnxruntime::hosting::ServerConfiguration config{}; + onnxruntime::server::ServerConfiguration config{}; Result res = config.ParseInput(11, test_argv); EXPECT_EQ(res, Result::ContinueSuccess); EXPECT_EQ(config.model_path, "testdata/mul_1.pb"); @@ -35,7 +35,7 @@ TEST(ConfigParsingTests, Defaults) { const_cast("--model"), const_cast("testdata/mul_1.pb"), const_cast("--num_http_threads"), const_cast("3")}; - onnxruntime::hosting::ServerConfiguration config{}; + onnxruntime::server::ServerConfiguration config{}; Result res = config.ParseInput(5, test_argv); EXPECT_EQ(res, Result::ContinueSuccess); EXPECT_EQ(config.model_path, "testdata/mul_1.pb"); @@ -50,7 +50,7 @@ TEST(ConfigParsingTests, Help) { const_cast("/path/to/binary"), const_cast("--help")}; - onnxruntime::hosting::ServerConfiguration config{}; + onnxruntime::server::ServerConfiguration config{}; auto res = config.ParseInput(2, test_argv); EXPECT_EQ(res, Result::ExitSuccess); } @@ -60,7 +60,7 @@ TEST(ConfigParsingTests, NoModelArg) { const_cast("/path/to/binary"), const_cast("--num_http_threads"), 
const_cast("3")}; - onnxruntime::hosting::ServerConfiguration config{}; + onnxruntime::server::ServerConfiguration config{}; Result res = config.ParseInput(3, test_argv); EXPECT_EQ(res, Result::ExitFailure); } @@ -73,7 +73,7 @@ TEST(ConfigParsingTests, ModelNotFound) { const_cast("--http_port"), const_cast("80"), const_cast("--num_http_threads"), const_cast("1")}; - onnxruntime::hosting::ServerConfiguration config{}; + onnxruntime::server::ServerConfiguration config{}; Result res = config.ParseInput(9, test_argv); EXPECT_EQ(res, Result::ExitFailure); } @@ -87,11 +87,11 @@ TEST(ConfigParsingTests, WrongLoggingLevel) { const_cast("--http_port"), const_cast("80"), const_cast("--num_http_threads"), const_cast("1")}; - onnxruntime::hosting::ServerConfiguration config{}; + onnxruntime::server::ServerConfiguration config{}; Result res = config.ParseInput(11, test_argv); EXPECT_EQ(res, Result::ExitFailure); } } // namespace test -} // namespace hosting +} // namespace server } // namespace onnxruntime \ No newline at end of file diff --git a/onnxruntime/test/hosting/unit_tests/test_main.cc b/onnxruntime/test/server/unit_tests/test_main.cc similarity index 100% rename from onnxruntime/test/hosting/unit_tests/test_main.cc rename to onnxruntime/test/server/unit_tests/test_main.cc diff --git a/onnxruntime/test/hosting/unit_tests/util_tests.cc b/onnxruntime/test/server/unit_tests/util_tests.cc similarity index 97% rename from onnxruntime/test/hosting/unit_tests/util_tests.cc rename to onnxruntime/test/server/unit_tests/util_tests.cc index 9ed7aaa8f9334..c16f87c4a712b 100644 --- a/onnxruntime/test/hosting/unit_tests/util_tests.cc +++ b/onnxruntime/test/server/unit_tests/util_tests.cc @@ -3,11 +3,11 @@ #include #include "gtest/gtest.h" -#include "hosting/http/core/context.h" -#include "hosting/http/util.h" +#include "server/http/core/context.h" +#include "server/http/util.h" namespace onnxruntime { -namespace hosting { +namespace server { namespace test { namespace protobufutil = 
google::protobuf::util; @@ -117,5 +117,5 @@ TEST(ContentTypeTests, ContentTypeMissing) { } } // namespace test -} // namespace hosting +} // namespace server } // namespace onnxruntime \ No newline at end of file diff --git a/onnxruntime/test/testdata/hosting/mnist.onnx b/onnxruntime/test/testdata/hosting/mnist.onnx deleted file mode 100644 index fc1a3f733c6e6243dd23dacb125b7a372de55a50..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 26454 zcmce-XH*r@vMx%F3P=zMf)bRVAR-8?t^yS>5+s?$04fGV5X?$Wf+%7_5d(^VqN1We zS7AU5Ac{Go7%*eb%3J%KbNAUN+&9Mi^G5$zqpNGqRb4f^W`%F6$ZIPM@eLYjsMz1m z$jL2UkZmqkp{%jSi2QH8m>SA@tvV zwDtbihlX8(BIbq1^mpk0PkuAyfPWv^U&c*FtxQ%;X?SE*>_SI1wSUgwq}_GM{K&ZZ zb7L1q%#ZBv66zPbP|6b8&#?1fXKhXYxv?>h{r*#GrGF=w$qxz(`?q-psQq^y zZOwr2QNfP=|7Fm>6U-C>!so{R+o-j&YX6-}TibvB{J9a4Go79PbEN7&X=chkQy2f^ z?tkb=&O`3M>F7TwUDOn$>WT4?`SZJqzHpo8GIx*M?cVy!JLGj){T?LALDrfOyhfPkv={3RXO>g;zgp#IPQ7arwSBKK#^N z5<(-z+tza6*SM7(%hn6uU)Brlrt7gsH+yuLxS5vKtQGj|Ejp1^OuvT2!V&!L>g>>+ z7k2sv7nk3MXECSXROcM}9DY@_if@7Tv8Ci>S^{U@II_clKAanP3%pkK=Z#7GsNmCE z?DXuLP;G6_5uu}SaeEU*5BS5+YxkkqSto4TaTVuA{f0mG*=)$~_=7%k%kMJrfVMoF zwRwXWaRx7~p2x;Fui)E)D#0wS5c;pPt!-Yvl>V-92c1_+G`pk=zS}L&R_Ct^`gPi3 zcEv!f?5s*7mpv3`&Q(O`5I0h2nE+OQD}>q0&(yq`KaxgPo3Y!=hj7Evj{E3~>P&RJ?A8@-s zU$K6t-=>xPxL09>N^g@)8-h^J??_V?e`(##4t`6BQNaUod82pqJ?;2Jvp@Ah1GAD zvdq&Zf?PovZyu#YrBBRQW38V1mDL+*iL!$GUO7wL=i(cp4r*G^~WeKio z*Ts-=ThQy_WcF~~3tN_^qknE5Z&;O%C*Sz-xtzbWZpj(Rq}p(t-K>tqTb_v^sOF^ApN;>uga_NmWm=f#4Zf+-Vde9vHRrgo& z=+rpA+TKhj*H**McbPaQRhOp}SHQ-G91^;zNu8%cj8@#7gO@SN>Ho)L*~oAa{Re(G;c#b9c+?y z@3GMmH8oVBU;iLc+v_phPjlt=t|2_4(GJ(d24iH)btrmng~2C-==ZYC_;K17vRv)N z(MsCf?uj+#M|$wj%}c4itAL#?v(PT>1btiZk(9k!ggt%Au)a=*_q48OwRMU7%HN=dbJPnc_qGjGeH)|&HV;Nh9%P2@#}bSngo??|KP0lO?b;-HN%IWHGU)h z@)PkOzN$G51Mg?EPqi10{<4@?xQC(j7Z;55E$71c3ql{wuXOobDNZ>S!)0@S3H_T( zh1K2W(w8wKMSPXOBe(45r#t@Q&i73?B>jXy`r+JtyqqL7a5o!Od=`f1IpW846VU2( zI{q2?2nN4j%Qk0=ak_Rke$YO}`#1Ng?c{Wa$31rB^jXvK<8~!?YwW=JUnI3NXXeq~ 
zo^llI+}-`)=PWFK>&hlq`w4*_YtUoZQ23PPiH)UQc-gg+AitvvU*A528sa-+s@zMk zD(X@*`;)gYSYMvcwvEB@trwu>!~2>m<~7u|2D$D|Ex21w#fO)s3U^vI2}SOgU~%Rg z$nK~Bt7FNmQ8*gE2k3 z-D8LQhN-%T`;21skgA%{mqWRy%wNi!kqHNP{UEc*P)O*R&pIzOt9NX-$4_=%?%R)K zvYzh(Q2nIM+k8)nrrwp};IjGrwqG{)4QzrdwkoXkL5eu#0@J8SO=rzrO^djn>>WtAW0bbYR^Z=fOWR1pHoW!_0wcuK7x>q#$z7OKl)80Y7mq7mFcaR+V0HdE(;x^SmtoTfsEe1?N`3vjl z((^c~kDM)TdVU_(20x&fmm%;2idI?`@V7&e6D4z^3m)z^-LOPlA z;i!8m-<3p@if1KXc^8OXCuHK5eG^eh<~YCC(H3)j8rh+8is1S5wxG4S3(lMB$D2#m zpj^*;RT?H9M#-Cxfr8S|G_j$u zzr=Q>o@iM68d64O!TxL4DEsSM7+SDcI5_38FfIIpSb6UV#Eh}SJN9?!Mao;UE$GZ{ zMt)*IM+#i?I}m$zVci=X>3W2yoEFl>}1#a zO0l?6hYQm3DAdM`SC8+>im%d0bJQ{XuKidt`idb$r{!>(;uhiPs1jk>`7F?i_r-p; zXJJCw2O7HgE4-MOEf%fwgRG(!2pwlf`fs0$=ciA{5rK-Rsy&wKJUSrt^dT7jcLbQ! zcw?Uh#T*O)?D0E>)y`$J{=OwJ^Yl>c+haE*Me1|sLSMG;*g=Y#2gIv^rtY3<@~q`m zO=x(nS@SH@5GWOzn&lfbu+grS`<{oJ0Zp$-nRzlcRZMgHMlI;D3n(E=IylLrNR{CuUMni1*>kDUm=6H%K zo41JNft@hFqlo9;n2d%$OYlJQCq8HK49D**;Ik8qc~G_+Ui*EFo+w#kZf_eHq%6Vm zCW)l>_f{UBV~EdlGby*;hL8R1#BL51P_WmM>{sZMx2G$2wzPuHxqqRmxGzO6Pozmt z)L2|{0=Cc6!QDHD$F7MAfXP9uitt&pXIgY6_9ch<6quh};{!{OctAsIP)m&{3^Vg~E9i+hHVja-v z%^6&-+#N5EGUs`p^g&l|jTkq*GhI}6WP50V9-Fs-i8M~SCTC5?O_{KoP59vA-dGS2 zfvcao!}al>MdLr$0foVQ^{X0soIgP(jj6amT*o;tE!|(tR%bt-WVn;{Rmi`aja|k} zXNwmWIDbPd_iPB~@bU<;r_M_>)t@astRK#%L$bK*GAC4hxgJg){7e;trMZoN3yhrN z$c^JF39j0s+UFSBn(`7h+gsxn#iQIq=uDbX@9FOZRi5?5ob}z(FuPw1e6%=D+vYg1 z)9T4&zeWO=hZe&rzjDa)E`o&YMJ%_OQAuo~-~~OnG}9lFa!28Zx~1Hrp@zZ*O^H)b zhZvUm9bUYX<;p4m5 z2&8F~esH|CC8W(g00UFx-AlcsG4?A1T$F7=o&zTF*mFzq(_l3|8hlflce%K~ag^hA zwrTu){4a>Lnj!h@Wm?;KF%f*5-wRzd_T$IQ4`Q!jEpXzZG%s`X<7w9xV|+vl29NB1#@aoU<(?zAui zf9wyzYcGfJ1T4YOu&tc=+q$O6LWa)-$Kd+PWf;@l2NiXn(~+SG?CkV{nmzXM;6dYg zkz+o+7=0F(L_~AIt83_6@sVb}w1lY>q9A?DJuJskZauJBd=s&SSDKw5`MT>g#cs63 zw@QY0yfCJ7%bI!EOcu^K4rHzTBJ4Pv$fHI+frmpb!JFmsh;bt%9&z#*BzKKIde1?H zw}q_tQV(Sug5b^^SB&btjn?QU2~}20_{H+A=-&4(YbCG7b!Nt}*E>O|n6ne7C6tR= z*M@=8#SqbTQZ`&WRwv1M<;E8;9|uYD8$n0GfE+@9LGs0w(ECLT9GzKBDph@P(X>50 
z;*J)#HYeee;tojqHG#gUKa`A+yc4=as)(%(5}csmCn&VWQ0Uz@iHf%+Iu{3mdxBC((0C*i%m5?QpA!>cn~Gw5nIR*hhq{-76x-$^FP z`wWkAG)IZgqsJbVCGn#ngwNDYA@YAiqWmc{5Wqt%-dCtvnI!jdp>Oy zR!w_M{`H!y?zIn07w*I#?{?wwPP#Q!YRceTzmZlfoyV)G1ZZFhDx7Gib-z0E#l`QT z$D@yAaBm4-jJ6fFf9}PpSq(H!zL{bVcOkRuBYE{iK)K7dZev~y5I+SXJ<^x1VWVLF zt6?whUHBN(9~ZGo@>FX2n1}sF`d}$-;=Ir6>Df$Ae8k`QviEwt@i7*z<#izce;7lT*#l;mIjSIAvx7S=cq>-p9SsB{2%4w(X+OchgU>&{P_ipU~v}JA?4+FM(gqFvD{;9nf^;j#%yQ1Jik* zP-;C7_TNk*uSKiGv3cdV z=XUABvfgJvQknpJ-`?TKcTByjGNJ39>tH$Y80wF=7JQ5M(9{urr1r{(#p-h5>RbWW zhV|vYD=UTamPd4T{$?8MtW#Um;}{;%`UVST1>k@~^0Z~MGA^)+=Cj63d)F#(X}5>) zCgCIZSnomV{g-j%r#H0an}jW{ZE_d=$MA;bow#S$1N7eR3jR3R2d6B13C3@JgUQQd z{C$QeX2k&?^-Bd!Egv*aDyCf_p1fP}03AABPM!TLYx?yI;i|z0!Q)VXBV2)tzo zF<-vZ?7uhQV$YL8^MuDVWonePW-y(W^zg0mm%BjgZD&I1=u%4lu#R#RO4#n_SoCP? zOr}4F;A(Y6OwGLmx09o}i|s>5om0q9&-6vJbK7a-AvyQWmLFhdlmv(DwCC@m#-P`W z&1_zH8b>S*5)aPKgAlt6u0QOHS0@ajojv}-h(BqBoC(7xq|*E!;h=R~g^KzD#awwx zzQMIM>n{uzUcW9Ae-DmF)wD2BRJZ>4;g9$HvB$9HD5-Paaj$xoZCQy=G><@H^*VG^^;L@vm9#U7IFO3 zmBNG=J?w}|!0u2@je88SY>+cSi7LuA$#JieVLTwd7ze*6ZrFE%zu)MMN;M<+D-q*pN~P^6AB2-K%Jj;b6G&;xEvm zBjUb?RrFZ8#|&+}NI$g&w3fz#lI)ed?~Jq-`$3+}7OLQx!;2`8w?g*|4s6)fRa)cO z#-r-)!^ZFqiK|})Oia$#Gc&A@)Q3AtB~Y@?8CsXz z5+~4F65}6$zNwP1M7JMTmssHEi@V6oJp>jVjKKQ2+c>c4J~*{afd_6^QSZWNp=TJd zdqNXfefuQrj0@wUZFlL|fn08D>@R6Puth8heJac~XcL7YPUvd8p*k(RFF*U}2GN6L zd3IAhEqi;Hwk=A8h3ke>3aPMiksQYlsS|Dvsu28A3gPTz8%_!d$LO&hxMIk6TB08d z&5E+T{&yrV{ zl;hDtIXoN@&ILou!Ew$&+I-fEHumf!j#z)6a@t>#{CP88+dm9F%HB}M=YE_RvPE=D zN)&x%e1)#I=Y^#|^ij3S26I=8Wt|gILd^nio;=Bz3r^VK1H~;6SNn+)9UEw7vL5IU zDMVpa9TliFQ=0a3JUr2xPsT6AS#}CstA1a2J}n6cri|pZ2jqEPK_&pwL$wJ=hk0g*eZLWVwtpGwDJw7cM-71jKjj2U%Tm->S|%l zFekDawH0>Pc1Eq*`8aCQZgG8U1}wO-hH{ts!k>Fa;N@pXxc1GX+3A}hy zcid)BOz$o@HT&7hqsX zcWfFoho4^ttlu^bp6a%WfqPHV=W(s{EV+~pth2_&(O!7wy(N!6v5tddrq{&%ZXvA~ zX_#?JnQFV&(nDJfPPSSH&7D&4-hzHuoSVo;+-A|hrVU^o8cP$BMY?fo2Jf&k;p>`P zs%`eAW17A!3crg`X_*3#?`s9NS;l;QMmuJYjOV@c_K?`Q8^`-@g6s8rS#L}xjZgh8 
zNoa}}m5ieCmeE`6Q@@6zZg=Bl&IMFAdI2u92_(1mGVZHKD%K7&>Lxf=^%D*A>QJ-1 zCy(~IClp4+bLd+YG#xur7_u=!+H*UNUuyQjJM^W#mN^`mXMkgk6!_?cE0A=klC)cQ z(-gDEk}s2V`H=9Dp2!@AYem0F_0>6aN*gXYT{V(sXiVXn3&yy`Ko#dLdjreX$kS=V zTlDpK2u`(nBbM%*$D83g)+&KvIHAnEy zob&MD(*touR5xy1cUhcrcOi~jHc9ldX`~|+)s%Q7Nr?QEjYCJ>BAa=2v?8@HB-#5> z>6vltZ2AEfgpLEnrTT2{b5GQEt%YCrE>o|JdQst;516@{i2Y^nK$oH-FzsQ$sU_>7 z)3kliH=n8aC&P2K9L~4xAgQ{uhIcGEaFY1Cey&ed?H3zX@$PIeg z;~9)`_QJm{-yk!5r9>8O$-cilFFAQx@*?3c8MscA#%nQjUsQwsJ+sguk z9!e7yho+EfR425SB=CzHi|~0$5MI!9q{-8hvFU&l8a-9T=NY@X%aBH~x8Dlx{UU-p z`)ovYX&p4Te_oAm!g}a`qzD@|a-nv0*P07`?$KLm9GKc3Tm5Tsrt}-VM=n1zc>J(h zQgl4VHv0LT5_}X*Zl+Od-Y$ON7{SMEr&0E!PdIwSPW(jelIyu=d5BvRjp}TGy)OI} zOrDm*>~F`xV8vze&VlpPRGNaFt_{a1{8i)r>o~TMmm6fLO z;*t^kyvGmttgsq}_teFf@l)A!8d7skiFj#ev0F-J7n~Fu&(;bngv;U~%ndorzx$O5 z^&bi_*f9!Zt{}J^ohHq}{?c*tLh1gng*La{Bh%!~RJhBKR;eaIuKNqveYF$YEwsVs zV-=w)@;-!m_om5R%xYSV3Q&J~AFiLfj-P+2#J7D+QSPrBo;ak3?Ymvr&a>UX!+IEWR+6 z)_PuwuzYe6efyG0-!~WWpX{;ttU!gI9*g3MX#@Gj*d>r~(4N+hpNu5~19@nx500I- z2NYL@W2BjG?R*(8vJQ)8@3Q@vq@|6eYMb%U-wc-d;|I(8#){>S)wpVAys&uE457E5 zn){-&ci4@Rxnt-&=>0bpGxLi;ZqH(lmH!5ldc{(+`bPV0Bju_nJ+^ZX;WThjugQ z%zQH(_CSY+hFRgRy~eDlWy_~`*uu@JBF+7K22?KG5<5K_KqlG!`Ch_0+@0{92NZ7P z;d4?^Bl9R7SU8?b&$eQKS`+EDUqIikoofx17DCEzW1d{Bg2nb9u~<(6^3H=<(yH!W z-dMq5EaR?hn$G)Tc2S4TS@ylP4Y}P0lU81YmXfYw&Xq7YD~&7f&DDaf$)V(Fx2r}q zTbbWIxk8Vh{1rYpe;1wg4p7`QBTBh?ooYSnNwWDex!3MNmy8TfzC02_w%-Bw?xm8C z4=wp%|0XC1pF~GyjN!khK7+M#Dt7Cv&g0(83Awwfarl`$cD`eX)^>jcy`j}G?Z+F= z|2mnEI~Uh@ch^IuwG;6Ej{vOOuEk29dZO7S4YBt1d6GT73l7R_vcuiwuy1Y#tnkYe z8)sgG(NDr?;ksqQ!!|Y4Fpy!#vkEjSXEn%n4}|9nj+3*VHyh8ifKI(-ab`f7SpCQt zy3Eetq$*oB4O77x?}wt{jR#`T+$mzevTj&>;W1grXYi3CJFI)Uoo7TT^5%ELA@Qvl z_xZF+RKGMDR+?*~!I@ihbgVw#Z|uVoodODw=4cw%J!=*WapnyZhY5KpbI8rOmASP@6;zCI}3aII!O!GS3fEjOZQf%icF1mh&eP^q< zkL%vb(_BB|!RomRiVBw>|l0&vUf9ekKh&wVw?9b7@SwDUNvJij!<@`EThd zYMcIz8vXNyZ<)RDXnQ7^zMT)DKK*Fp>n`Xi+lxcGAuptbVBI4FU5-2v4Gt@DcCaHZ z4>V+dTxUpjrh8!J^p}F)LPN3t=PF^E zxAgmJbK#05?Rb84FYf5q1anMxh>5U{rY82G{rx^l#GJ7t>++TqhRCwPl2{nIPn!?4 
z-=OWCt)biNc3Ab|3&nU0z`aS1Z1yz?&s+Dw4Xy>^uOFUl;H%HQRrK-9%xJ#0@d4y} zkA*>B5_tK;3%D%iQ;m%0HwskH;IALgk&MDnaP#TMkM2%@&Q=-Vn}19g_@%IFpr<@3 zC@rK3iRrN2(g?JZ*VC_xdE(bY7s+H%B(*PHL(`wFA`8z)bfiHY4PV}Y#p-tKf5DTZ zM;7wbt?A@DA_F(%A@9j=z`QQC(!Tp#Shr#_PT7=#gQlbd58NyH^i!_3WUekX4O7G= zlh)wffeP?Qqn54he1&6Hd8{9thSgt&@NG#F?7ZHBGm}qB`$iw=z?V$?nX>~9ExJR= z8)n0g+)JW!h$00_eb=`=_SCGE0b4#42qV@ipwV_E)>pm)?G_jDZP%&nH>8Aj-%e)T zM_0uhdk>Dtw}6sl1^0v@3bbg0GW@ulMoXLt6Wb4gY{6unaJ-lo4)({)92=;+-i?fh zCSl^f6qXO!j8UvC?Q2)y;v0<|KIe>RnQe^k6OXXsx=Yy0t*U1GuSC}U@=J70{!Xv2 zYw=OtXo%WUK^{3(;B-S*uW>MxT^PeNr8>dO1E#`z#o436bWTeBG$zVi?rA`(b> znIZog_MF@eav&pZxLd~YQt%3MWt$@($!pCdNkVBn8VvEqZ-JrYG*F%&7~f&{WXYZznRJnXC`sCw8K>NHW?0_PUhA3u9NDrQZ9U~%C?2F zw78;8csu7fs1!dWnfLB2YZuP9M_+@Pe+QF+?j2FkJtX>ee=ST^_JmcV8>;cxb?_c_ zl=$})$}%c|{xfe2^1qV!dD%KP+ZoIU*X9Z)4>CbG(TRS3-pX(H%kr!y8!>t5d(isU z7p=x*kip8S@K?nV75b#oywMtbNp3rTQ@lxPO9;&Jw&2YfOCe`SIN$j;nT1VoFtlT`AWeFy|~b!&6>?tLAtLnWxTwCZ+RuyYbv!`$xQ&KN&7>w`2PWd0bd*O-?z> zxpQa(1iTw6-8nB%?uty-pJc$*gAS7Y23bjZ`XpGo)E!QJu;9B#Ss1r21y)~QCiNtX zA<8X482RHASs%Sk7jDnS_5q{un$ZceUbcca)aan!)gUnNs1rjkHA4FL2;#9@DPYtb z64s7}&jC{OqwXp&b~53uPc(Vq_C11OM5|z7eud&EzZS0R%5z@yES_JG!M;%{*wI=D zzpno#gD1MIt>M6p5nEvJqX}4dce7|X7RA8qo_zgd57PQNTin-ElT@yBLb+%0IHb2D zq^e)wN7;oGzTN?BQ1I$za(Sx@$7grK(`g1Y*sPXV zuT`???j9jMat>av)e)}^vu7*Og|!<=5XA26*)2qD`lC#jlgnwp`*dEcpTMWAfS-(< zL2X;kK-1XkU{dS}rY;&WSm@Dc}5@aC7S| ziK&bR5A=Tl`W7+L4oe0d^_OGqCG}#>)5&;bL>yL@N3iO`PNMmfVSMgsCAl7V<}EKq za%}W=PCe2}^|L>dVx0}x8VbU*v?@xOXvwK7EqTeQTDswR8D4aoi{@HoxIe>;BfOu9 zH{Kax&2?qa_~ws`$K8i*E^?Thd0X)P0qi<)BByB^v9-!j9wb~Lo4e_B?~^M(Xqmxf z4_8uD_!xFwP(e#?EAhqmpD5JUfd3w7ge%uS!^AfcSY!7}?67bGk5N5wZeI~(+Gb(d zo(l2Xa8Go5y%9Fd7zX#Z^q_Us9wZYnn}bz!sjS=@M*Y}MZHH5M`MGMU(s5vk)w)2qYZANB2_m?5m}i$M|cg0aB>S5z}4RLC~BJt|Sfo!Z9MRVnMQTmUm zoFdhKlBZnl!u{Jh=4KbZtsEvy8>T|ZYnRfm=m>WF(uaqJzo&uYtLeQ`3heh@z>Urs z;{GY^!pvhK*eAxG&5rKCVkxTdhCvLgJhbC1w@`jGXe6rt>Vv+0mxJB}dpa4B!MTlT zurqc8UTrjht>bKQ`sF=5CNr7GAJ3!hI>R{5XdSGZSs)CH-9VZ1rqbGxw?*%G54Q1O 
z*nV@6_;SC%@Z!6erLdQ-kNgN#I_mI6iuee3?I5i|CvZdVR&3j5jvJS}ht=}^(R$%z z^geb$T)QHOu9Ovt{vCJVXWSdoc;5_mT@wkmzN5~fw4(bukH#ABfHK9Mg5B~va5AU_ z)OAd7YnM%2Q0>Y7=4jD;EO}APGjHTTn@N!fmb*vujunJc{dz7otI3`DI(j_B-r3{H+E;XD=4k9X3jwF*flYd?)2lT3c; zGW^wSKY5&(B`nwI0#7$t@!j~PEHNiOcf*B0)o5VKsjC3BjkH|flxx>q;={T{Fw_4X zJp4TrI<*Gk+LbpUMsssbg3Tg{)%Za?cu@@`?mkAU(yX+oz>=@}cBfLup`?=@L<@gQ zyI74!gyp;Cc))OPJ}@kkCyzP>rDta0?NMhy{ZcRLxjTm4=V)?-n+1ITFaclKuK~N? zTd`$MCT%odM4q#!i+ROiIKAX6T#}Eciw=dfx;aNe|A-4z^4U;JgN>c1acF-_jvlv~ zo5R*|RntTUwN2EtcNoh&Tg?u$QaPj!neWX&i{t~sh&MrQCSC)Dtfo{5{Gm#^ae=J6 zI)D`x1o7awUE=vizMzqrFDccPqHJ0h;Eqp8BriV!rf~*7`+5j}gpI~|be(&4Ue4+r zH%K>aAf4WEU#Rr2fJKerth91A7Yw-}QPIy6zqstjo{~T~vb&kGZqB8@1{S>jg^9S< zJ%q(K6|jHEMGCyE&d-MXlcAqEPo1$BtUk^M!-F%)LW(ff5M`wMWOponqa_9N7mGAK=By5A82=acZ}*|#?(8<}=iClqt9yH-=r$w1UXv+gDBlvg`>$uk$)@~1;4R@BYi`yXDn*FZ z(96*WIsV-~I`dMGx712A)!Za1JA4bQlNF&n&VugFwxyGA^_UhRi2Xddm-ac@g7?JQ zN)0jjwuJ9$DMH%aDc}@46QTpFAkAh2Rd)1(@5<|hQ)i{1{Ws+>OKT(e_-%q?X~VI- ziwbTw-b)LTL^94kASSA$fc2DgA@am1-cfm;PwgHL6Cdm2!T3b+>N$6E?(4z*Z<|o> zx;ly&u!tZ2c}yC+JILskH{W&~%O_-yQ}?Q8)SfhgpB#Hh(+w9=O5ckRaA;`FP50AK zI3t^rqq4c+!#yZ_X@lx2rc$)nC5q6SNrz+mvP?=VTsb4p#wHQGTsTXM=?XP}ZYJCL z)wI|DBAwkQ%ioG~=yIAWt-I^b2li<2e480u*Q1Q07oHK8Ra@ZqeLeYvPBhQbap!Xn zOQ7!YZ;A4v!CY~_E4k!$;nLR<>8TKXxX`keC$6yOm0y8_*3IFvHO_R-I*1KSR#8LU z9dY;9DmwCJt*G#FJSQmIvGXfmN?mAy)0f_)0#V>`k;SyJ@T_p>Y7MBcAOM<(MzGF*8`Hz+(A`s zt&*U)FXE_pe^isb25q6sKzMzdnsf`G@5^+zfmc1zHDEq$?;FDVFSODyUn3lA+>K7R zO`?%WGssQS7uTvd;N!mQNiEBSqrJ|-%L9kVu*eX+D!eHrg?9BQpyl!(L`#)3 zn2GNpza~M@$s5Qc&DM%bG;YC5|2D~?5wWyqYi>|5xz>4lmp?vaABUb`T(zb|cf)Z?vU8q3A_@#*k+Y^LFf zN>wUoe#D4Vw@u~NfQ_7dXFat=hR_iIwGe4&i>~dxFs{;-4u)8>;M9{PTMyB#jhf<; zQz_(gtDWNH^H_4G3l~dr=&0{TYTDuf;dN_a-ANSotUm8{(mn&%Pq>aXZ%VLx+&E}| zSxxCHs%gRNA_{5VPTOx7v5m_b`k5_7wi&GzCeO(bP9)u+hS`QJb3l!6>rNFlY+J;; z-@e27za#klgd1Y2;#J|#r3JL3V}|6a?b5gv)#a?jxj=?ryVk?SSr+J+c$)qe zr?LGLf3At0Dwud1!-v*dvQh5I7yIQ>llwio-=;#!rrqHCgA+7t?S5MDP=+to8?s^K zQ^~>bV7Pm;RM5ib@FYNr4C`Y6*L>3;^W;Op*kPKW*yjnI{xg^Ro|eFq^nLK_?hL*s 
zbCtH3w~*Y7^HiGnOWd<(5Sl%7f>W#WMZa`4cJ#34w2%d8H z$kuA0^w_)A0JOUHJY}XAU~J2sUlA5OQbF zfFJVVc%ZO1R1Z(CF?urGO=)QkYR7yQHjPrJ!}9~U#$ggKZ@S8ha0HLqIEm%emDuw0 zboPx*;PAi>B@uT+tUWS%BbUMlVFh6h52lgBdhlujV@A@ z^wFm#d~9?AudQ^3oBr0Aw6PF%lHG(ogW{lQp%MGaJraJ~0R_f25I9zW-m#zL^k4>! z^u9=AmOK|XHP%z$y4N)-&Ms2)P7?X1JrskEWwQHH7tWQQ7WDnHOvsU+%GI^&#l_*> zd2)gcPpFmWkJ*{5plL;+VP^Po`ZJjGxd;X%4(CH1$GH5II*y(kiid)~35VCai9sJ` zahxPU95%QwwoUDh+0Wv^a6k{~{Qg3CfhHVZa!njkeO45=x{CWhTf*eB4vF3MJMgU4 zhIJks50c$%!q!C{a6CT*n}(>06|O15 z^v}v@W|{$8rKtXkT3(>{zzkc)*y6YzA4oY?i_;cnQ)A+9$XR-cRQ#1`k#R1C3?EH> zLnrdc6+L($#e2=2q%@XDdAX-XESs$5FcVBu+Q%&Sg#`VPMl#VZfEnd}*!>4x3T~wD7B3uNA-fqX`K;klK|#XW6V@WI`ge~k_ow*T%Xp1j+{;l|bM zD0e{!*xOF~*AC~%?TdMn>?R(Ra)zS+Z028fA$+x8GOf#~C8s(?Uf*pynHsMYO{S@E z(3eY)x?A8RH>qA+G|=8!iNltZ3hy$92>Z{Z3s;x_rm<-bY5WePH;?D4y@Z5hA3#QWC(a)RY^-^HMq6Trr zrVu{$U72HDgYiwtW?Wu&k>}6d>E^iH0)IDj!^PVDIBrcipFQ-G)<~gwX1~fQ=S3_p z=o`dcp^duw6p~f)FJ8VhgW`bCA8_HCygF^MoJ z+Z$hcZxGhZeolYppP8?b;O%%U0tZzXLVz{#H`vJzG{faUIpP_JhNVBQ-fM`tkm+PW);>A`g%D zpfNC`6@F$xa$xPyO<{UHYVb%=9@T0&WY6@^x|j@ z18Jkv7u-+x1?XDL#lkM}_@zTI>$;0@v(AAUcH9s?KNG>`ikj$>S0f%$dPI$DhQRKI zL$If31(kfhMsKf1bByH@A^zr8eEe1!%<62p$EW+SAW$2-^fbeDI#=kD$xJK^*#-gr zU7+hU9WlLrDVnGD2ZJZd7!>)AzK-_*Dw5$&8x{HSmSo&=S`YU>3=`h=b>%5;zi9T; zN$9m!5qCUa!S&a+lkvgZu&()#^mL#R9;tNU8*bZRQ-dYF&fAHRBNp%ksgIWQEfp!fc>1#bD4*E#|7q;X zynoDR{+R1rb7s!WnK{?>{a#;|wVr}K-y)d$ zd(6Ec8*==dDYJ6TTUcf#OlJ!8Ft2@!Nb3$$%x;gu9r8EGEZsNYYqkm%yrNNRxHA

o@PU7;m zYK*CT#LgA51o}gq)=Z8g<&NFt`+kPz2D#A|_c7?XR0}bA+b{2bU+mCz0~mVeIC3x zV1YThdF*E?BoQHpnS$4zXf&PAOm`Ir6%Hf)b!#$&aT%<6%EPJfyGGJHYZ8A-vNRgZ zy98e^s8Dh~mrM$X0;8+LK!~_v!H;m_x?KZ1=gZLQWO*X4l8Q<9^T?8=BBZoEld(-7 z1R1@9FzJ{+Z7(#VYZcU}n}<1;7-f)(P&vACn-;5c+8ak_I)YeN@mWU}vN9Z|9~z&TxS`207S zc-J=s{d{b3ox%=?*bz$>2jya}$wXWdvJ5BgZh$4L&w<0>JZimA%=+_Pawnk{;| zgzoTaqLMo!Ny%4h5;S`wS{16$__!Dn7DK24H?Le1+{5bO12{iRiLU>eM%N5HA=(9x zSysQC9SDwxh^`=NIjBYjU)rF1QZifpu8s8BC*TUxqp;evjaVI;3a27tQRAi+-aq$* zut_?Yveb+m<Ov&M)*r{xQ_`AS1W#WQ@t+;rX1`eEDh@Kgv8I>w~QhrCB z)%i9O^Ky*w@kKYluU*V-nNjS=A10V(e-_rBx(0CY2rH-`4aFCI`6p%V(NX9#?~>4b zlE1-*W)?P3$;l$LR%;CnNll`GveRhE=R@%GXE7Vq*-V~oZs%ulRTi#GrqKi$Wyld) zPA4{6(YI%{Fi|y*ytlgt$}tpYC#FM*w;>$KD{ED|QN%v`b*OHU+*Q_1FcU78=TViWW_I(aDRj5iB)Z$!pX_vA zM-yV^5;al;_mlGZN!MCIgj?_PVuW#=$yG*90m-SHGq5Ak53khpfR1Guc`;lOr2{ij zbbT-gsJl~3X#qBH?r54MC(bO?Na3E*cF;ZK0E~YuD>LOCasO&VMpT~yzS0hSBb<++ zonGj5H=Z4Sp%oVkmf|p}9^5zV8Lm9EAA9sF@hoBS%Ce(4-M1NZMtmmUOkMCmU@gRt zTg7&z9D|M}a_~v#Jh%iehhh3BiH?{oI%&4R9sg03s_4>1=64~_q!K!XUGT2!RaW>* z9K14fC83gcNwfJFdX{I8W18>6V7@Xv^!+ON>ekAPv)O<U3)QZhkUeYssm~J`x=6`}r|;DUtjloP6Jg58zuQQSbV@OJ$ysnasf44A z{IP6L22;T0J)$%C_@THJ9o;`did`A&H|q=eP;Ekw*^MRV)GcuYdy%);f{T48^^nsc zcj=h*Pk^>J6Uk4S$gYy33RwrK<*BpGQ5ki7-IK@9v=^qWPhQiveQBg*r!bO8ar9oj zgjZtkPS<`@VxB78CSvzHsB$QF z|IBJJ4k$Nn2wX4M;-_0(Q0Q|9RK*eYPE@7CtkhA2T_XFDvYDp7Gl&P3;$-CcJH){1=&r(do=AeC{EDq0767TT%}* zXyy&eC6n>!T1ObrsRs4j`_TR*9U_lGb8d zQLs`?i(1*}Gwo_AWSrp6{iJ1%F&Vi!hMyE1 z!9P77#hg$qOxv^-)wbj%Sy;g~aORk;kGV z!98yU8>QRp_B&d9%7#C-vhqQr@`|Ox8^Q)fhUka zTsefwckd$p%a@T%f}1HgE^Q(EI@jRNFPGqq#z8oICWPczT2S4&MeL#Q`DEGf91DKh#7~uS#TS!?<52rKfSU2VE>#WY+HfZ#G&+{rgbyRP@)F>N z{%Q!=YDdi%x-q$C;@I}Z6?dmUgH8`ElvZ?w{Ky^n&}2Mz&=7LTVj47A<+387lh8J- zmUr?)7A(;*z@-t@FeIvv+g?6{pb4X3tepWJ|7!uw8nc!xSSAhjMa?ko{zJ0t2bVV& zodyTa+T&s0Zjkw6j{_#c_|9Gjcdbmqk()Z9=8Q7G>9ROnjy7NgeRXilYjN;B?8JCP z2omwkH{_u2LD-`s3*{G|vp0uJ(s%h${HhaQq3g~pp2kQQ;>$>a(bj129=ry}NCE8I zaUG^+*0JCBh>#Yy9PrADB!U$Hb=K>!LCzHChV;XInI2;5f73$7WD?%YlSe1J({R_b 
z9<&2$LAN0j+729n#I6f$claB2^WP|)|&^QBxYlori=p2ywu^TE(2B5iUGptyb zNkqKMz(_(HcQF^qT-$rhpxy^|DDgE(G_7D2w!DkDO>`(ZF@+m@%IGcypW_TM>z0gR+>z>g)MknI-Hz6*b6Z&yZq` zZ?K_15pE`DL$~llUW4~9a?4Es=BFoP%jarxV^}@W-an7jHo4J*&-YQkR6Vk-XcBr| zs9`=eO2N~Gxg_228{_z;8@8$oUmTwtQdYT*0kmH6y|F@AJ81sSbH5P9PUqjlW{C&}jG1HU-jX)zzC z9uUBt-y7Kd`^I1cUl|{sy#tlux5-*VEzkQbb<{r>Po^DZo+Jhnd}HtCo=xkXgt1j39;C4g(!{{#YnAtOoVYUvvn|o zY%?At&p+zI{E36)&e0S`%dr7I$QY4VE7i!{84scD%z9S5sEhfLak$QS-w5Qw{@Cat zL#2cE!D*{$bne0h)Hvr0fBl3i;{8yDI4dSGUI$k)mb3N9y5bNJ&GKby1g=BxL+%64 z^MTUqo}_}S!W5M@0KJ-mxTQ}3XFPpOHq_LTwVqNtW=5ZCnnzvtqOG9($ z?Dw|Rz+NBr-n1qcmM!9tfmx6zUI;^L9z#lwFo&Bt#CmU=k1r(z@Vc!#I1q6TPY{h4 z2W{bd{AjW#UkCSoutmk?q39shK<053aMr~d(UNy+^k!{5;Ta|~I_tx6#NIvR=7SQV zd|ru56syo>wT2kwcMbw?x$+J_vLO>TsE{?=PD7Ql2yxjy1vC18f#I94gsfIz3r+#g zy?7oTFHRvpy)*d!+sgs9rjar0y%@`GJ+ewJ7h*4%5%1?Yq$Mtr|6r*(V-c{1^897V zv=Q<+jEKuO?9ugYJln6u~4!!g*o6npEq098jEhk zP>+CbytGB39L|M1Z-Emz;ND5J#Z>XQKpHl12)gd%`>e#=<>+oFjUoE;uzHa+CN3=m z6?;3Zc_NPYMkrzasul3KKpFcMO(VC$gGhNBl3ccSegcKUI{HuxnNJKCbQh>7dyA5931Rc!F;U&M)sL9`YsG(y#}U`{2y0g zSjlcOF;o(39P&VG<5n`fyNB^kKT4eLE~2G#e(}b9HAQieQEc{9A67yp2o8t&V`;V- zB##;a6~Sh>+a{hD{4o;R4bPKtk{duVjzgq(34wd!UUvIve`fMJJMzNE9HcVDF-OJ_ z?3+d5Y_J|I_UvX}rEa3p%VuFva16h@Rf8@$JQYh7Z1{2$H?n(|&!CnW(qyM*98A^m zAa^?>IQHBaDl+yZSh;aH6E418T9yF2zXae%uY*u8_nk5CnT^h$mf)E~@8NUp69}Ga zPE(a+PR1@g4+h_DaKYgZaBlf1Y`nmIlLF2_dfI+Q`_X0gXVp!ZR^fpWa`hz0 zZVY&@a7P<`M{XTziYEHqAo4+nJ}ooHew`)gxjc}3#C`Chy@jVF~hA!3kEwpzu~Eq*$*rpgiWpYTCz{0d+;|AYx^0`Z@0oE2r<0iXmW0|=0Ls=2@cP21A4MS8?5yBCk zn!Hs)La6)q!mqZO&^4$)&jd~*oiD23o|+DJ%g8}y-BaGUN;mW~tbp*v z=h(3k^|12J9uU@(!4TPdpy&LNv}je3MvqWtgQ6IoiO~b`Gvf5J)B~nOXCf23e}IHI z4iKT^2{1Ts26?%pi?rX&Aj%a^#KWo-EO&N+NADcAu5SQvel^&?m_YmV1khou2ECqo z8*HDZ;>&I)v^uOv{q}tVoB89oyiPFkX2|1>)?#8e=^9VIbBNi;v4LLHIbhZjCD^!E z6Af&g@m}c>v=<6R1*z{$|C3yF)-Oe+#iud$X*xM^a}F&WZ-5)P|3U8yzPO}&KW!az zosRzA4+)AUr1xYIRWqrjD(aC~(*BUB=|5%5gPriX`Ye=ijKXR9m&n5tYpB72G%~zy zh%C(iLQL#6>E;oAOwEyMc=&KKy&gV}ro{;$`^X*jmb$`ErxqeTMHcVRR-!N1e5lYx 
zSaf%Yo%)u;uV#7Cc9~6Nb5{XToH+wpbKkSQa-$(V_9~kr_#PhHX%T!N2*>+E@x&1+ z=vBT1x=-VYqjn;@Ytl`!yXPM8bhtS(Mh*w~d$7MSkIwL#2+7C2F|bY#F6nfywD0J59OgvSeSu&EA{sJLnw zG1@bgT7B~&u|oaKa%lys?sAPddF-O2^2Ldng9NkGBbg3Y^rL<F8s z(&m|qDe1oqKdmC6mp_h;l(fXN>5j;1Gw8YgDcF6;W~9E!(NFgJbVafPIPbp$PA;K9 z!3>%OftRpp7_h3+h%-F!(~Lw^)Ud)EB26&SM4$L>PPTLvuZ7x+r=js!1$ilWnmtG_ z!jM%r&!NeXnC_Fsy9wrazta|7xr$L~q5VWDmSc;)IRuX#_Ttn9O^|qLD}8RnqtkC@ zvR}tVpu>$I*1f))JZsOu2*EU#NwkG9RXv=xEgl@=(#g&^3jL-}V4r#v?^Drn@*>=T zNNCzn!}^)HSWX2k+^(?uFMHxW?jA1;^e37%FJRs~Q`#S&1y0YVlj_Gl*fsGgT)RW4 zfDpkeW+iZ^>>+#Y)+2cN<0c%SUPSkjCTf3(g2CP^#OX^f^I+U1E9Z@|R6%z+xi%^Y z>w*vSbbc9Q?l&iD9GFP5R>fM4aNkM3I1hnQz5;4js^I-V235Gqa;?j((QmIYiZ0&8 zdwWJ2lT7?zm@l7cyyy>mcbhU>dA~rfUI!ADz3C*UD`Zi38Mz@7!L4!b!-F9!E5%p) zsn@D{a_biZax=;qzlK7NC0g|PTqkgj*eOBuVMO5!~n zJXb`XwW$%^)~%TObqrRw$brnKUFfLV%1$&CBhj~%P_A+e3F;`o>9w+S>kU7eyrZ6d zveg;YM1?VMlM!9fIfXtce#nlj*#O1CYILo)8NKYX0u=Nda82TC(0Fs3jP;Br7<`6N z@o<3N+}-fRS{p3{Gbs&iAoq87@RiGI3ZIYpX!zm5R8#C0fd%;jP<;$UV)31n)_ zV@9w1!4!?RK*wP_nWm|(Wb1_vXgXtsWsEaO9qA?f=V55YRRb-$6OEQ4f|w)sl-%cX zBJIO@==kb8=*fSF_LufpmUjq?gZQv%{uKc~t+{)# zC#npNWn|E@;bgubMud7EL*003bQV@Vqd5vj2ZKdy%MN{<|9(@gwRO*wm*#2 z#lPo01TQp}T>x(^cI|B!*ni@zQ=CZKn#i5BzYPu z&5{6nYb>3mOsjY4)5Rb{)oV4GjiowR8&F3(qvud7#q%VbS<085qKpD{-*~!Qo;Gs% z2GYDNiLCydgPU{1vFBkB(>fqZUw?9~hM!w*l zS9Oi==c9FFBW&9@A6NFwC5P2zxxXcUM#Z)h?EY98UB6a@qlO6lgB+44qTDIRb{q&n zEjz*bK8qL_Yc59{5=Yi&FZhCvn$8@BaGG@|SDs__k5^EsCK zYM}&Q+S(;;r1gTg8R$KrUf*A-L9&BaP|F+sczbRydV8m1xsMj)xB1iYUcYG6yEZEM za0c!sO=`1I4c|;FLcJT~aMsN;bmUc}cJt)w^R_qPw2mp5 z8?}lm`jyhwuZ>tW7>Fl|cY$3>5c}&%Doqk?WM7NiBWv3u=@_e8`tG@r_2jTJIvCMP z8L=k1zPOlnwr!!}`rSA`B^!KxD&VrZDY*XRb}SUwT3=;sUO)S?c)imrjr#T3uTZzq z1#>G(aYEfGT#=xJ3D!}dV{rhN8gmrkV+RVqH80~SrTVS3o46fTvHXDqV|#TryevU z=|kJ=A*pWOB-quu>hPnYqH-=@h-O)&X6Ut+%ql^c@$#g3eOh za@ig`#`=)Kep#Ag$D^{dCF@51cd);}-Q(d4k<6c;^(*vd_0uS_=GQXWDpO}pu*V@s*S(aU71m5jCWejhAx@4}aF z7vTG}Iy!yP0QKPC##IgND6oA8CIz?91=*QYAh-)}XJw<;qBCI1(a1WyyU55;Bh-

#@kY=I4PB+Efc4VJ^|ei)vTj|DE2q;iHH7LTG>-U&k0VT!Ho^{V~qp- z?we1?8Od3DmVbt@6OJ^M6{SP<_TmEEt9-M8V9}l;N^5@e4IY4KA0~LE&+C^ z_WB{lUrj~TcXnv^2ZadzsegNQ1UOdJ*fMTXs~PhcHy`Jd5~&0>-5?fRUW;IjT@iCc ztRMc+ZmPZ~$Hx*7``yk)KBO29etQfj*2tpe6*u%)XO7RZ7ozcSaTH$Th`I}Q<5JTY zJl&TK^+&y!#Ya;4oeNq}RLL9*=a2r=%WR=PNlZ{y)JjMoLeTF2Q6^ts_wVKrf)aNB zO;*&HnC4^N(g0fAI9Lo_QPnzk~)PB*er-<%e^Q1-*6u zRKwro-%kG9=JzJyJ>kELnEcV@=s&qMm6w)~