Skip to content

Commit

Permalink
Add onnxruntime 1.18.1 for Linux aarch64 GPU (#1914)
Browse files Browse the repository at this point in the history
  • Loading branch information
csukuangfj authored Feb 24, 2025
1 parent 7774e35 commit bafd110
Show file tree
Hide file tree
Showing 4 changed files with 25 additions and 2 deletions.
3 changes: 3 additions & 0 deletions .github/workflows/aarch64-linux-gnu-shared.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,9 @@ jobs:
- os: ubuntu-22.04-arm
gpu: ON
onnxruntime_version: "1.16.0"
- os: ubuntu-22.04-arm
gpu: ON
onnxruntime_version: "1.18.1"
- os: ubuntu-22.04-arm
gpu: OFF
onnxruntime_version: ""
Expand Down
3 changes: 2 additions & 1 deletion CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,8 @@ option(SHERPA_ONNX_USE_PRE_INSTALLED_ONNXRUNTIME_IF_AVAILABLE "True to use pre-i
option(SHERPA_ONNX_ENABLE_SANITIZER "Whether to enable ubsan and asan" OFF)
option(SHERPA_ONNX_BUILD_C_API_EXAMPLES "Whether to enable C API examples" ON)

set(SHERPA_ONNX_LINUX_ARM64_GPU_ONNXRUNTIME_VERSION "1.11.0" CACHE STRING "Used only for Linux ARM64 GPU. If you use Jetson nano b01, then please set it to 1.11.0. If you use Jetson Orin NX, then set it to 1.16.0")
set(SHERPA_ONNX_LINUX_ARM64_GPU_ONNXRUNTIME_VERSION "1.11.0" CACHE STRING "Used only for Linux ARM64 GPU. If you use Jetson nano b01, then please set it to 1.11.0. If you use Jetson Orin NX, then set it to 1.16.0. If you use NVIDIA Jetson Orin Nano Engineering Reference Developer Kit
Super - Jetpack 6.2 [L4T 36.4.3], then set it to 1.18.1")


set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/lib")
Expand Down
8 changes: 8 additions & 0 deletions build-aarch64-linux-gnu.sh
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,14 @@
# export SHERPA_ONNX_ENABLE_GPU=ON
# export SHERPA_ONNX_LINUX_ARM64_GPU_ONNXRUNTIME_VERSION=1.16.0
# ./build-aarch64-linux-gnu.sh
#
# (d) For NVIDIA Jetson Orin Nano Engineering Reference Developer Kit Super
# Jetpack 6.2 [L4T 36.4.3] (CUDA 12.6)
#
# export SHERPA_ONNX_ENABLE_GPU=ON
# export SHERPA_ONNX_LINUX_ARM64_GPU_ONNXRUNTIME_VERSION=1.18.1
# ./build-aarch64-linux-gnu.sh


if command -v aarch64-none-linux-gnu-gcc &> /dev/null; then
ln -svf $(which aarch64-none-linux-gnu-gcc) ./aarch64-linux-gnu-gcc
Expand Down
13 changes: 12 additions & 1 deletion cmake/onnxruntime-linux-aarch64-gpu.cmake
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,11 @@ to cmake (You need to make sure CUDA 10.2 is available on your board).
If you use Jetson Orin NX, then please pass
-DSHERPA_ONNX_LINUX_ARM64_GPU_ONNXRUNTIME_VERSION=1.16.0
to cmake (You need to make sure CUDA 11.4 is available on your board).
If you use NVIDIA Jetson Orin Nano Engineering Reference Developer Kit
Super - Jetpack 6.2 [L4T 36.4.3], then please pass
-DSHERPA_ONNX_LINUX_ARM64_GPU_ONNXRUNTIME_VERSION=1.18.1
to cmake (You need to make sure CUDA 12.6 is available on your board).
")

set(v ${SHERPA_ONNX_LINUX_ARM64_GPU_ONNXRUNTIME_VERSION})
Expand All @@ -36,8 +41,14 @@ set(onnxruntime_URL2 "https://hf-mirror.com/csukuangfj/onnxruntime-libs/resolve/

# Pick the SHA256 checksum matching the requested onnxruntime version.
# For 1.18.1 the release archive is a CUDA 12 build published under a
# different file name, so the download URLs set above are overridden too.
# Any version not listed here is rejected with a hard error rather than
# silently downloading an archive whose hash would fail verification.
if(v STREQUAL "1.11.0")
  set(onnxruntime_HASH "SHA256=36eded935551e23aead09d4173bdf0bd1e7b01fdec15d77f97d6e34029aa60d7")
elseif(v STREQUAL "1.16.0")
  set(onnxruntime_HASH "SHA256=4c09d5acf2c2682b4eab1dc2f1ad98fc1fde5f5f1960063e337983ba59379a4b")
elseif(v STREQUAL "1.18.1")
  set(onnxruntime_URL "https://github.com/csukuangfj/onnxruntime-libs/releases/download/v1.18.1/onnxruntime-linux-aarch64-gpu-cuda12-1.18.1.tar.bz2")
  set(onnxruntime_URL2 "https://hf-mirror.com/csukuangfj/onnxruntime-libs/resolve/main/onnxruntime-linux-aarch64-gpu-cuda12-1.18.1.tar.bz2")
  set(onnxruntime_HASH "SHA256=1e91064ec13a6fabb6b670da8a2da4f369c1dbd50a5be77a879b2473e7afc0a6")
else()
  message(FATAL_ERROR "Unsupported onnxruntime version ${v} for Linux aarch64")
endif()

# If you don't have access to the Internet,
Expand Down

0 comments on commit bafd110

Please sign in to comment.