From 16b9197c689546da557f4d708b2280ab7bb17d4f Mon Sep 17 00:00:00 2001 From: Edward Chen <18449977+edgchen1@users.noreply.github.com> Date: Wed, 7 Jul 2021 18:37:08 -0700 Subject: [PATCH] [iOS] Add SpeechRecognition example (#9) Add an iOS speech recognition example that uses the Wav2Vec 2.0 model. --- .../mobile-examples-pipeline.yml | 86 ++-- .../templates/use-python-step.yml | 7 + .../templates/xcode-build-and-test-step.yml | 21 + mobile/README.md | 36 +- mobile/examples/basic_usage/ios/Podfile | 2 +- .../examples/basic_usage/model/gen_model.sh | 1 - .../basic_usage/model/requirements.txt | 4 +- .../examples/speech_recognition/ios/Podfile | 12 + .../project.pbxproj | 484 ++++++++++++++++++ .../contents.xcworkspacedata | 7 + .../xcshareddata/IDEWorkspaceChecks.plist | 8 + .../AccentColor.colorset/Contents.json | 11 + .../AppIcon.appiconset/Contents.json | 98 ++++ .../Assets.xcassets/Contents.json | 6 + .../ios/SpeechRecognition/AudioRecorder.swift | 130 +++++ .../ios/SpeechRecognition/ContentView.swift | 61 +++ .../ios/SpeechRecognition/Info.plist | 52 ++ .../Preview Assets.xcassets/Contents.json | 6 + .../SpeechRecognition-Bridging-Header.h | 4 + .../SpeechRecognitionApp.swift | 13 + .../SpeechRecognition/SpeechRecognizer.swift | 97 ++++ .../ios/SpeechRecognitionTests/Info.plist | 22 + .../SpeechRecognitionTests.swift | 14 + .../ios/images/screenshot.png | Bin 0 -> 34359 bytes .../examples/speech_recognition/ios/readme.md | 41 ++ .../speech_recognition/model/gen_model.sh | 14 + .../speech_recognition/model/readme.md | 31 ++ .../speech_recognition/model/requirements.txt | 5 + .../speech_recognition/model/wav2vec2_gen.py | 22 + 29 files changed, 1250 insertions(+), 45 deletions(-) create mode 100644 ci_build/azure_pipelines/templates/use-python-step.yml create mode 100644 ci_build/azure_pipelines/templates/xcode-build-and-test-step.yml create mode 100644 mobile/examples/speech_recognition/ios/Podfile create mode 100644 mobile/examples/speech_recognition/ios/SpeechRecognition.xcodeproj/project.pbxproj create mode 100644 mobile/examples/speech_recognition/ios/SpeechRecognition.xcodeproj/project.xcworkspace/contents.xcworkspacedata create mode 100644 mobile/examples/speech_recognition/ios/SpeechRecognition.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist create mode 100644 mobile/examples/speech_recognition/ios/SpeechRecognition/Assets.xcassets/AccentColor.colorset/Contents.json create mode 100644 mobile/examples/speech_recognition/ios/SpeechRecognition/Assets.xcassets/AppIcon.appiconset/Contents.json create mode 100644 mobile/examples/speech_recognition/ios/SpeechRecognition/Assets.xcassets/Contents.json create mode 100644 mobile/examples/speech_recognition/ios/SpeechRecognition/AudioRecorder.swift create mode 100644 mobile/examples/speech_recognition/ios/SpeechRecognition/ContentView.swift create mode 100644 mobile/examples/speech_recognition/ios/SpeechRecognition/Info.plist create mode 100644 mobile/examples/speech_recognition/ios/SpeechRecognition/Preview Content/Preview Assets.xcassets/Contents.json create mode 100644 mobile/examples/speech_recognition/ios/SpeechRecognition/SpeechRecognition-Bridging-Header.h create mode 100644 mobile/examples/speech_recognition/ios/SpeechRecognition/SpeechRecognitionApp.swift create mode 100644 mobile/examples/speech_recognition/ios/SpeechRecognition/SpeechRecognizer.swift create mode 100644 mobile/examples/speech_recognition/ios/SpeechRecognitionTests/Info.plist create mode 100644 
mobile/examples/speech_recognition/ios/SpeechRecognitionTests/SpeechRecognitionTests.swift create mode 100644 mobile/examples/speech_recognition/ios/images/screenshot.png create mode 100644 mobile/examples/speech_recognition/ios/readme.md create mode 100755 mobile/examples/speech_recognition/model/gen_model.sh create mode 100644 mobile/examples/speech_recognition/model/readme.md create mode 100644 mobile/examples/speech_recognition/model/requirements.txt create mode 100644 mobile/examples/speech_recognition/model/wav2vec2_gen.py diff --git a/ci_build/azure_pipelines/mobile-examples-pipeline.yml b/ci_build/azure_pipelines/mobile-examples-pipeline.yml index 03019beb1d5cb..d04e418f9f3f9 100644 --- a/ci_build/azure_pipelines/mobile-examples-pipeline.yml +++ b/ci_build/azure_pipelines/mobile-examples-pipeline.yml @@ -1,37 +1,49 @@ -jobs: -- job: BasicUsageIos - pool: - vmImage: "macOS-10.15" - - steps: - - task: UsePythonVersion@0 - inputs: - versionSpec: '3.6' - addToPath: true - architecture: 'x64' - - - script: | - set -e - pip install -r ../model/requirements.txt - ../model/gen_model.sh ./OrtBasicUsage/model - workingDirectory: mobile/examples/basic_usage/ios - displayName: "Generate model" - - - task: CocoaPods@0 - inputs: - workingDirectory: 'mobile/examples/basic_usage/ios' - forceRepoUpdate: false - - - task: Xcode@5 - inputs: - actions: 'test' - configuration: 'Debug' - sdk: 'iphonesimulator' - xcWorkspacePath: 'mobile/examples/basic_usage/ios/OrtBasicUsage.xcworkspace' - scheme: 'OrtBasicUsage' - xcodeVersion: 'specifyPath' - xcodeDeveloperDir: '/Applications/Xcode_12.4.app/Contents/Developer' - packageApp: false - destinationPlatformOption: 'iOS' - destinationTypeOption: 'simulators' - destinationSimulators: 'iPhone 8' +jobs: + +# mobile/examples/basic_usage/ios +- job: BasicUsageIos + pool: + vmImage: "macOS-10.15" + + steps: + - template: templates/use-python-step.yml + + - bash: | + set -e + pip install -r ../model/requirements.txt + ../model/gen_model.sh ./OrtBasicUsage/model + workingDirectory: mobile/examples/basic_usage/ios + displayName: "Generate model" + + - script: pod install + workingDirectory: 'mobile/examples/basic_usage/ios' + displayName: "Install CocoaPods pods" + + - template: templates/xcode-build-and-test-step.yml + parameters: + xcWorkspacePath: 'mobile/examples/basic_usage/ios/OrtBasicUsage.xcworkspace' + scheme: 'OrtBasicUsage' + +# mobile/examples/speech_recognition/ios +- job: SpeechRecognitionIos + pool: + vmImage: "macOS-10.15" + + steps: + - template: templates/use-python-step.yml + + - bash: | + set -e + pip install -r ../model/requirements.txt + ../model/gen_model.sh ./SpeechRecognition/model + workingDirectory: mobile/examples/speech_recognition/ios + displayName: "Generate model" + + - script: pod install + workingDirectory: 'mobile/examples/speech_recognition/ios' + displayName: "Install CocoaPods pods" + + - template: templates/xcode-build-and-test-step.yml + parameters: + xcWorkspacePath: 'mobile/examples/speech_recognition/ios/SpeechRecognition.xcworkspace' + scheme: 'SpeechRecognition' diff --git a/ci_build/azure_pipelines/templates/use-python-step.yml b/ci_build/azure_pipelines/templates/use-python-step.yml new file mode 100644 index 0000000000000..3610547254ecd --- /dev/null +++ b/ci_build/azure_pipelines/templates/use-python-step.yml @@ -0,0 +1,7 @@ +steps: +- task: UsePythonVersion@0 + inputs: + versionSpec: '3.6' + addToPath: true + architecture: 'x64' + displayName: "Use Python 3.6" diff --git 
a/ci_build/azure_pipelines/templates/xcode-build-and-test-step.yml b/ci_build/azure_pipelines/templates/xcode-build-and-test-step.yml
new file mode 100644
index 0000000000000..1338916e68953
--- /dev/null
+++ b/ci_build/azure_pipelines/templates/xcode-build-and-test-step.yml
@@ -0,0 +1,21 @@
+parameters:
+- name: xcWorkspacePath
+  type: string
+- name: scheme
+  type: string
+
+steps:
+- task: Xcode@5
+  inputs:
+    actions: 'test'
+    configuration: 'Debug'
+    sdk: 'iphonesimulator'
+    xcWorkspacePath: '${{ parameters.xcWorkspacePath }}'
+    scheme: '${{ parameters.scheme }}'
+    xcodeVersion: 'specifyPath'
+    xcodeDeveloperDir: '/Applications/Xcode_12.4.app/Contents/Developer'
+    packageApp: false
+    destinationPlatformOption: 'iOS'
+    destinationTypeOption: 'simulators'
+    destinationSimulators: 'iPhone 8'
+  displayName: "Xcode build and test"
diff --git a/mobile/README.md b/mobile/README.md
index 500ddcd2aafe1..a48aa8f899881 100644
--- a/mobile/README.md
+++ b/mobile/README.md
@@ -1,15 +1,43 @@
-# ONNX Runtime Mobile examples
+# ONNX Runtime Mobile Examples
 
-The following examples demonstrate how to use ONNX Runtime (ORT) Mobile in mobile applications.
+These examples demonstrate how to use ONNX Runtime (ORT) Mobile in mobile applications.
 
-## Basic usage
+## General Prerequisites
+
+These prerequisites apply to all of the examples.
+Individual examples may specify additional requirements.
+Please refer to the instructions for each example.
+
+### Get the Code
+
+Clone this repo.
+
+```bash
+git clone https://github.com/microsoft/onnxruntime-inference-examples.git
+```
+
+### iOS Example Prerequisites
+
+- Xcode 12.4+
+- CocoaPods
+- A valid Apple Developer ID if you want to run the example on a device
+
+## Examples
+
+### Basic Usage
 
 The example app shows basic usage of the ORT APIs.
 
 - [iOS Basic Usage](examples/basic_usage/ios)
 
-## Image classification
+### Image Classification
 
 The example app continuously classifies the objects it sees through the device's camera in real time and displays the most probable inference results on the screen.
 
 - [Android Image Classifier](examples/image_classifications/android)
+
+### Speech Recognition
+
+The example app uses speech recognition to transcribe speech from audio recorded by the device.
+
+- [iOS Speech Recognition](examples/speech_recognition/ios)
diff --git a/mobile/examples/basic_usage/ios/Podfile b/mobile/examples/basic_usage/ios/Podfile
index 89aff2e6a89fc..ad3801c5b0edc 100644
--- a/mobile/examples/basic_usage/ios/Podfile
+++ b/mobile/examples/basic_usage/ios/Podfile
@@ -3,7 +3,7 @@ platform :ios, '11.0'
 target 'OrtBasicUsage' do
   use_frameworks!
 
-  pod 'onnxruntime-mobile-objc', '1.8.0-preview'
+  pod 'onnxruntime-mobile-objc'
 
   target 'OrtBasicUsageTests' do
     inherit! :search_paths
diff --git a/mobile/examples/basic_usage/model/gen_model.sh b/mobile/examples/basic_usage/model/gen_model.sh
index 1f4adcfbe15f0..665c8c9b19aba 100755
--- a/mobile/examples/basic_usage/model/gen_model.sh
+++ b/mobile/examples/basic_usage/model/gen_model.sh
@@ -12,4 +12,3 @@ cd ${OUTPUT_DIR}
 python3 ${DIR}/single_add_gen.py
 
 python3 -m onnxruntime.tools.convert_onnx_models_to_ort .
- diff --git a/mobile/examples/basic_usage/model/requirements.txt b/mobile/examples/basic_usage/model/requirements.txt index c98065b9d4b9c..4757a3717e3d8 100644 --- a/mobile/examples/basic_usage/model/requirements.txt +++ b/mobile/examples/basic_usage/model/requirements.txt @@ -1,2 +1,2 @@ -onnx==1.9.0 -onnxruntime==1.8.0 +onnx>=1.9.0 +onnxruntime>=1.8.0 diff --git a/mobile/examples/speech_recognition/ios/Podfile b/mobile/examples/speech_recognition/ios/Podfile new file mode 100644 index 0000000000000..7190ed4ffe162 --- /dev/null +++ b/mobile/examples/speech_recognition/ios/Podfile @@ -0,0 +1,12 @@ +platform :ios, '11.0' + +target 'SpeechRecognition' do + use_frameworks! + + pod 'onnxruntime-mobile-objc' + + target 'SpeechRecognitionTests' do + inherit! :search_paths + end + +end diff --git a/mobile/examples/speech_recognition/ios/SpeechRecognition.xcodeproj/project.pbxproj b/mobile/examples/speech_recognition/ios/SpeechRecognition.xcodeproj/project.pbxproj new file mode 100644 index 0000000000000..eb6d8f4084e6a --- /dev/null +++ b/mobile/examples/speech_recognition/ios/SpeechRecognition.xcodeproj/project.pbxproj @@ -0,0 +1,484 @@ +// !$*UTF8*$! +{ + archiveVersion = 1; + classes = { + }; + objectVersion = 51; + objects = { + +/* Begin PBXBuildFile section */ + EFE237D726855E4600234E2C /* SpeechRecognitionApp.swift in Sources */ = {isa = PBXBuildFile; fileRef = EFE237D626855E4600234E2C /* SpeechRecognitionApp.swift */; }; + EFE237D926855E4600234E2C /* ContentView.swift in Sources */ = {isa = PBXBuildFile; fileRef = EFE237D826855E4600234E2C /* ContentView.swift */; }; + EFE237DB26855E4B00234E2C /* Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = EFE237DA26855E4B00234E2C /* Assets.xcassets */; }; + EFE237DE26855E4B00234E2C /* Preview Assets.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = EFE237DD26855E4B00234E2C /* Preview Assets.xcassets */; }; + EFE237E926855E4B00234E2C /* SpeechRecognitionTests.swift in Sources */ = {isa = PBXBuildFile; fileRef = EFE237E826855E4B00234E2C /* SpeechRecognitionTests.swift */; }; + EFE2380226855FB900234E2C /* AudioRecorder.swift in Sources */ = {isa = PBXBuildFile; fileRef = EFE2380126855FB900234E2C /* AudioRecorder.swift */; }; + EFE2380526855FD700234E2C /* SpeechRecognizer.swift in Sources */ = {isa = PBXBuildFile; fileRef = EFE2380426855FD700234E2C /* SpeechRecognizer.swift */; }; + EFE238072685608A00234E2C /* wav2vec2-base-960h.all.ort in Resources */ = {isa = PBXBuildFile; fileRef = EFE238062685608A00234E2C /* wav2vec2-base-960h.all.ort */; }; +/* End PBXBuildFile section */ + +/* Begin PBXContainerItemProxy section */ + EFE237E526855E4B00234E2C /* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = EFE237CB26855E4600234E2C /* Project object */; + proxyType = 1; + remoteGlobalIDString = EFE237D226855E4600234E2C; + remoteInfo = SpeechRecognition; + }; +/* End PBXContainerItemProxy section */ + +/* Begin PBXFileReference section */ + EFE237D326855E4600234E2C /* SpeechRecognition.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = SpeechRecognition.app; sourceTree = BUILT_PRODUCTS_DIR; }; + EFE237D626855E4600234E2C /* SpeechRecognitionApp.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SpeechRecognitionApp.swift; sourceTree = ""; }; + EFE237D826855E4600234E2C /* ContentView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ContentView.swift; sourceTree = ""; }; + EFE237DA26855E4B00234E2C /* 
Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = Assets.xcassets; sourceTree = ""; }; + EFE237DD26855E4B00234E2C /* Preview Assets.xcassets */ = {isa = PBXFileReference; lastKnownFileType = folder.assetcatalog; path = "Preview Assets.xcassets"; sourceTree = ""; }; + EFE237DF26855E4B00234E2C /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; + EFE237E426855E4B00234E2C /* SpeechRecognitionTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = SpeechRecognitionTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; }; + EFE237E826855E4B00234E2C /* SpeechRecognitionTests.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SpeechRecognitionTests.swift; sourceTree = ""; }; + EFE237EA26855E4B00234E2C /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = ""; }; + EFE2380126855FB900234E2C /* AudioRecorder.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = AudioRecorder.swift; sourceTree = ""; }; + EFE2380326855FC800234E2C /* SpeechRecognition-Bridging-Header.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = "SpeechRecognition-Bridging-Header.h"; sourceTree = ""; }; + EFE2380426855FD700234E2C /* SpeechRecognizer.swift */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.swift; path = SpeechRecognizer.swift; sourceTree = ""; }; + EFE238062685608A00234E2C /* wav2vec2-base-960h.all.ort */ = {isa = PBXFileReference; lastKnownFileType = file; name = "wav2vec2-base-960h.all.ort"; path = "model/wav2vec2-base-960h.all.ort"; sourceTree = ""; }; +/* End PBXFileReference section */ + +/* Begin PBXFrameworksBuildPhase section */ + EFE237D026855E4600234E2C /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; + EFE237E126855E4B00234E2C /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXFrameworksBuildPhase section */ + +/* Begin PBXGroup section */ + BE5303DF73A9410AA3E0E0EA /* Pods */ = { + isa = PBXGroup; + children = ( + ); + path = Pods; + sourceTree = ""; + }; + EFE237CA26855E4600234E2C = { + isa = PBXGroup; + children = ( + EFE237D526855E4600234E2C /* SpeechRecognition */, + EFE237E726855E4B00234E2C /* SpeechRecognitionTests */, + EFE237D426855E4600234E2C /* Products */, + BE5303DF73A9410AA3E0E0EA /* Pods */, + ); + sourceTree = ""; + }; + EFE237D426855E4600234E2C /* Products */ = { + isa = PBXGroup; + children = ( + EFE237D326855E4600234E2C /* SpeechRecognition.app */, + EFE237E426855E4B00234E2C /* SpeechRecognitionTests.xctest */, + ); + name = Products; + sourceTree = ""; + }; + EFE237D526855E4600234E2C /* SpeechRecognition */ = { + isa = PBXGroup; + children = ( + EFE2380126855FB900234E2C /* AudioRecorder.swift */, + EFE2380426855FD700234E2C /* SpeechRecognizer.swift */, + EFE2380326855FC800234E2C /* SpeechRecognition-Bridging-Header.h */, + EFE237D626855E4600234E2C /* SpeechRecognitionApp.swift */, + EFE237D826855E4600234E2C /* ContentView.swift */, + EFE237DA26855E4B00234E2C /* Assets.xcassets */, + EFE237DF26855E4B00234E2C /* Info.plist */, + EFE238062685608A00234E2C /* wav2vec2-base-960h.all.ort */, + EFE237DC26855E4B00234E2C /* Preview 
Content */, + ); + path = SpeechRecognition; + sourceTree = ""; + }; + EFE237DC26855E4B00234E2C /* Preview Content */ = { + isa = PBXGroup; + children = ( + EFE237DD26855E4B00234E2C /* Preview Assets.xcassets */, + ); + path = "Preview Content"; + sourceTree = ""; + }; + EFE237E726855E4B00234E2C /* SpeechRecognitionTests */ = { + isa = PBXGroup; + children = ( + EFE237E826855E4B00234E2C /* SpeechRecognitionTests.swift */, + EFE237EA26855E4B00234E2C /* Info.plist */, + ); + path = SpeechRecognitionTests; + sourceTree = ""; + }; +/* End PBXGroup section */ + +/* Begin PBXNativeTarget section */ + EFE237D226855E4600234E2C /* SpeechRecognition */ = { + isa = PBXNativeTarget; + buildConfigurationList = EFE237F826855E4B00234E2C /* Build configuration list for PBXNativeTarget "SpeechRecognition" */; + buildPhases = ( + EFE237CF26855E4600234E2C /* Sources */, + EFE237D026855E4600234E2C /* Frameworks */, + EFE237D126855E4600234E2C /* Resources */, + ); + buildRules = ( + ); + dependencies = ( + ); + name = SpeechRecognition; + productName = SpeechRecognition; + productReference = EFE237D326855E4600234E2C /* SpeechRecognition.app */; + productType = "com.apple.product-type.application"; + }; + EFE237E326855E4B00234E2C /* SpeechRecognitionTests */ = { + isa = PBXNativeTarget; + buildConfigurationList = EFE237FB26855E4B00234E2C /* Build configuration list for PBXNativeTarget "SpeechRecognitionTests" */; + buildPhases = ( + EFE237E026855E4B00234E2C /* Sources */, + EFE237E126855E4B00234E2C /* Frameworks */, + EFE237E226855E4B00234E2C /* Resources */, + ); + buildRules = ( + ); + dependencies = ( + EFE237E626855E4B00234E2C /* PBXTargetDependency */, + ); + name = SpeechRecognitionTests; + productName = SpeechRecognitionTests; + productReference = EFE237E426855E4B00234E2C /* SpeechRecognitionTests.xctest */; + productType = "com.apple.product-type.bundle.unit-test"; + }; +/* End PBXNativeTarget section */ + +/* Begin PBXProject section */ + EFE237CB26855E4600234E2C /* Project object */ = { + isa = PBXProject; + attributes = { + LastSwiftUpdateCheck = 1250; + LastUpgradeCheck = 1250; + TargetAttributes = { + EFE237D226855E4600234E2C = { + CreatedOnToolsVersion = 12.5.1; + }; + EFE237E326855E4B00234E2C = { + CreatedOnToolsVersion = 12.5.1; + TestTargetID = EFE237D226855E4600234E2C; + }; + }; + }; + buildConfigurationList = EFE237CE26855E4600234E2C /* Build configuration list for PBXProject "SpeechRecognition" */; + compatibilityVersion = "Xcode 9.3"; + developmentRegion = en; + hasScannedForEncodings = 0; + knownRegions = ( + en, + Base, + ); + mainGroup = EFE237CA26855E4600234E2C; + productRefGroup = EFE237D426855E4600234E2C /* Products */; + projectDirPath = ""; + projectRoot = ""; + targets = ( + EFE237D226855E4600234E2C /* SpeechRecognition */, + EFE237E326855E4B00234E2C /* SpeechRecognitionTests */, + ); + }; +/* End PBXProject section */ + +/* Begin PBXResourcesBuildPhase section */ + EFE237D126855E4600234E2C /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + EFE238072685608A00234E2C /* wav2vec2-base-960h.all.ort in Resources */, + EFE237DE26855E4B00234E2C /* Preview Assets.xcassets in Resources */, + EFE237DB26855E4B00234E2C /* Assets.xcassets in Resources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; + EFE237E226855E4B00234E2C /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXResourcesBuildPhase section */ + +/* Begin 
PBXSourcesBuildPhase section */ + EFE237CF26855E4600234E2C /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + EFE2380226855FB900234E2C /* AudioRecorder.swift in Sources */, + EFE2380526855FD700234E2C /* SpeechRecognizer.swift in Sources */, + EFE237D926855E4600234E2C /* ContentView.swift in Sources */, + EFE237D726855E4600234E2C /* SpeechRecognitionApp.swift in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; + EFE237E026855E4B00234E2C /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + EFE237E926855E4B00234E2C /* SpeechRecognitionTests.swift in Sources */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXSourcesBuildPhase section */ + +/* Begin PBXTargetDependency section */ + EFE237E626855E4B00234E2C /* PBXTargetDependency */ = { + isa = PBXTargetDependency; + target = EFE237D226855E4600234E2C /* SpeechRecognition */; + targetProxy = EFE237E526855E4B00234E2C /* PBXContainerItemProxy */; + }; +/* End PBXTargetDependency section */ + +/* Begin XCBuildConfiguration section */ + EFE237F626855E4B00234E2C /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = dwarf; + ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_TESTABILITY = YES; + GCC_C_LANGUAGE_STANDARD = gnu11; + GCC_DYNAMIC_NO_PIC = NO; + GCC_NO_COMMON_BLOCKS = YES; + GCC_OPTIMIZATION_LEVEL = 0; + GCC_PREPROCESSOR_DEFINITIONS = ( + "DEBUG=1", + "$(inherited)", + ); + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 14.5; + MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; + MTL_FAST_MATH = YES; + ONLY_ACTIVE_ARCH = YES; + SDKROOT = iphoneos; + SWIFT_ACTIVE_COMPILATION_CONDITIONS = DEBUG; + SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + }; + name = Debug; + }; + EFE237F726855E4B00234E2C /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++14"; + CLANG_CXX_LIBRARY = "libc++"; + CLANG_ENABLE_MODULES = YES; + 
CLANG_ENABLE_OBJC_ARC = YES; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + ENABLE_NS_ASSERTIONS = NO; + ENABLE_STRICT_OBJC_MSGSEND = YES; + GCC_C_LANGUAGE_STANDARD = gnu11; + GCC_NO_COMMON_BLOCKS = YES; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 14.5; + MTL_ENABLE_DEBUG_INFO = NO; + MTL_FAST_MATH = YES; + SDKROOT = iphoneos; + SWIFT_COMPILATION_MODE = wholemodule; + SWIFT_OPTIMIZATION_LEVEL = "-O"; + VALIDATE_PRODUCT = YES; + }; + name = Release; + }; + EFE237F926855E4B00234E2C /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CODE_SIGN_STYLE = Automatic; + DEVELOPMENT_ASSET_PATHS = "\"SpeechRecognition/Preview Content\""; + ENABLE_PREVIEWS = YES; + INFOPLIST_FILE = SpeechRecognition/Info.plist; + IPHONEOS_DEPLOYMENT_TARGET = 14.0; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); + PRODUCT_BUNDLE_IDENTIFIER = com.onnxruntime.SpeechRecognition; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_OBJC_BRIDGING_HEADER = "SpeechRecognition/SpeechRecognition-Bridging-Header.h"; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = 1; + }; + name = Debug; + }; + EFE237FA26855E4B00234E2C /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CODE_SIGN_STYLE = Automatic; + DEVELOPMENT_ASSET_PATHS = "\"SpeechRecognition/Preview Content\""; + ENABLE_PREVIEWS = YES; + INFOPLIST_FILE = SpeechRecognition/Info.plist; + IPHONEOS_DEPLOYMENT_TARGET = 14.0; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); + PRODUCT_BUNDLE_IDENTIFIER = com.onnxruntime.SpeechRecognition; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_OBJC_BRIDGING_HEADER = "SpeechRecognition/SpeechRecognition-Bridging-Header.h"; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = 1; + }; + name = Release; + }; + EFE237FC26855E4B00234E2C /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES; + BUNDLE_LOADER = "$(TEST_HOST)"; + CODE_SIGN_STYLE = Automatic; + INFOPLIST_FILE = SpeechRecognitionTests/Info.plist; + IPHONEOS_DEPLOYMENT_TARGET = 14.0; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + 
"@executable_path/Frameworks", + "@loader_path/Frameworks", + ); + PRODUCT_BUNDLE_IDENTIFIER = com.onnxruntime.SpeechRecognitionTests; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + TEST_HOST = "$(BUILT_PRODUCTS_DIR)/SpeechRecognition.app/SpeechRecognition"; + }; + name = Debug; + }; + EFE237FD26855E4B00234E2C /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES = YES; + BUNDLE_LOADER = "$(TEST_HOST)"; + CODE_SIGN_STYLE = Automatic; + INFOPLIST_FILE = SpeechRecognitionTests/Info.plist; + IPHONEOS_DEPLOYMENT_TARGET = 14.0; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + "@loader_path/Frameworks", + ); + PRODUCT_BUNDLE_IDENTIFIER = com.onnxruntime.SpeechRecognitionTests; + PRODUCT_NAME = "$(TARGET_NAME)"; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + TEST_HOST = "$(BUILT_PRODUCTS_DIR)/SpeechRecognition.app/SpeechRecognition"; + }; + name = Release; + }; +/* End XCBuildConfiguration section */ + +/* Begin XCConfigurationList section */ + EFE237CE26855E4600234E2C /* Build configuration list for PBXProject "SpeechRecognition" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + EFE237F626855E4B00234E2C /* Debug */, + EFE237F726855E4B00234E2C /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + EFE237F826855E4B00234E2C /* Build configuration list for PBXNativeTarget "SpeechRecognition" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + EFE237F926855E4B00234E2C /* Debug */, + EFE237FA26855E4B00234E2C /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + EFE237FB26855E4B00234E2C /* Build configuration list for PBXNativeTarget "SpeechRecognitionTests" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + EFE237FC26855E4B00234E2C /* Debug */, + EFE237FD26855E4B00234E2C /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; +/* End XCConfigurationList section */ + }; + rootObject = EFE237CB26855E4600234E2C /* Project object */; +} diff --git a/mobile/examples/speech_recognition/ios/SpeechRecognition.xcodeproj/project.xcworkspace/contents.xcworkspacedata b/mobile/examples/speech_recognition/ios/SpeechRecognition.xcodeproj/project.xcworkspace/contents.xcworkspacedata new file mode 100644 index 0000000000000..919434a6254f0 --- /dev/null +++ b/mobile/examples/speech_recognition/ios/SpeechRecognition.xcodeproj/project.xcworkspace/contents.xcworkspacedata @@ -0,0 +1,7 @@ + + + + + diff --git a/mobile/examples/speech_recognition/ios/SpeechRecognition.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist b/mobile/examples/speech_recognition/ios/SpeechRecognition.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist new file mode 100644 index 0000000000000..18d981003d68d --- /dev/null +++ b/mobile/examples/speech_recognition/ios/SpeechRecognition.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist @@ -0,0 +1,8 @@ + + + + + IDEDidComputeMac32BitWarning + + + diff --git a/mobile/examples/speech_recognition/ios/SpeechRecognition/Assets.xcassets/AccentColor.colorset/Contents.json b/mobile/examples/speech_recognition/ios/SpeechRecognition/Assets.xcassets/AccentColor.colorset/Contents.json new file mode 100644 index 0000000000000..eb87897008164 --- /dev/null +++ 
b/mobile/examples/speech_recognition/ios/SpeechRecognition/Assets.xcassets/AccentColor.colorset/Contents.json @@ -0,0 +1,11 @@ +{ + "colors" : [ + { + "idiom" : "universal" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/mobile/examples/speech_recognition/ios/SpeechRecognition/Assets.xcassets/AppIcon.appiconset/Contents.json b/mobile/examples/speech_recognition/ios/SpeechRecognition/Assets.xcassets/AppIcon.appiconset/Contents.json new file mode 100644 index 0000000000000..9221b9bb1a35f --- /dev/null +++ b/mobile/examples/speech_recognition/ios/SpeechRecognition/Assets.xcassets/AppIcon.appiconset/Contents.json @@ -0,0 +1,98 @@ +{ + "images" : [ + { + "idiom" : "iphone", + "scale" : "2x", + "size" : "20x20" + }, + { + "idiom" : "iphone", + "scale" : "3x", + "size" : "20x20" + }, + { + "idiom" : "iphone", + "scale" : "2x", + "size" : "29x29" + }, + { + "idiom" : "iphone", + "scale" : "3x", + "size" : "29x29" + }, + { + "idiom" : "iphone", + "scale" : "2x", + "size" : "40x40" + }, + { + "idiom" : "iphone", + "scale" : "3x", + "size" : "40x40" + }, + { + "idiom" : "iphone", + "scale" : "2x", + "size" : "60x60" + }, + { + "idiom" : "iphone", + "scale" : "3x", + "size" : "60x60" + }, + { + "idiom" : "ipad", + "scale" : "1x", + "size" : "20x20" + }, + { + "idiom" : "ipad", + "scale" : "2x", + "size" : "20x20" + }, + { + "idiom" : "ipad", + "scale" : "1x", + "size" : "29x29" + }, + { + "idiom" : "ipad", + "scale" : "2x", + "size" : "29x29" + }, + { + "idiom" : "ipad", + "scale" : "1x", + "size" : "40x40" + }, + { + "idiom" : "ipad", + "scale" : "2x", + "size" : "40x40" + }, + { + "idiom" : "ipad", + "scale" : "1x", + "size" : "76x76" + }, + { + "idiom" : "ipad", + "scale" : "2x", + "size" : "76x76" + }, + { + "idiom" : "ipad", + "scale" : "2x", + "size" : "83.5x83.5" + }, + { + "idiom" : "ios-marketing", + "scale" : "1x", + "size" : "1024x1024" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/mobile/examples/speech_recognition/ios/SpeechRecognition/Assets.xcassets/Contents.json b/mobile/examples/speech_recognition/ios/SpeechRecognition/Assets.xcassets/Contents.json new file mode 100644 index 0000000000000..73c00596a7fca --- /dev/null +++ b/mobile/examples/speech_recognition/ios/SpeechRecognition/Assets.xcassets/Contents.json @@ -0,0 +1,6 @@ +{ + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/mobile/examples/speech_recognition/ios/SpeechRecognition/AudioRecorder.swift b/mobile/examples/speech_recognition/ios/SpeechRecognition/AudioRecorder.swift new file mode 100644 index 0000000000000..3846727acec67 --- /dev/null +++ b/mobile/examples/speech_recognition/ios/SpeechRecognition/AudioRecorder.swift @@ -0,0 +1,130 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+
+import AVFoundation
+import Foundation
+
+private let kSampleRate: Int = 16000
+private let kRecordingDuration: TimeInterval = 10
+
+class AudioRecorder {
+  typealias RecordingBufferAndData = (buffer: AVAudioBuffer, data: Data)
+  typealias RecordResult = Result<RecordingBufferAndData, Error>
+  typealias RecordingDoneCallback = (RecordResult) -> Void
+
+  enum AudioRecorderError: Error {
+    case Error(message: String)
+  }
+
+  func record(callback: @escaping RecordingDoneCallback) {
+    let session = AVAudioSession.sharedInstance()
+    session.requestRecordPermission { allowed in
+      do {
+        guard allowed else {
+          throw AudioRecorderError.Error(message: "Recording permission denied.")
+        }
+
+        try session.setCategory(.record)
+        try session.setActive(true)
+
+        let tempDir = FileManager.default.temporaryDirectory
+
+        let recordingUrl = tempDir.appendingPathComponent("recording.wav")
+
+        let formatSettings: [String: Any] = [
+          AVFormatIDKey: kAudioFormatLinearPCM,
+          AVSampleRateKey: kSampleRate,
+          AVNumberOfChannelsKey: 1,
+          AVLinearPCMBitDepthKey: 16,
+          AVLinearPCMIsBigEndianKey: false,
+          AVLinearPCMIsFloatKey: false,
+          AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue,
+        ]
+
+        let recorder = try AVAudioRecorder(url: recordingUrl, settings: formatSettings)
+        self.recorder = recorder
+
+        let delegate = RecorderDelegate(callback: callback)
+        recorder.delegate = delegate
+        self.recorderDelegate = delegate
+
+        guard recorder.record(forDuration: kRecordingDuration) else {
+          throw AudioRecorderError.Error(message: "Failed to record.")
+        }
+
+        // control should resume in recorder.delegate.audioRecorderDidFinishRecording()
+      } catch {
+        callback(.failure(error))
+      }
+    }
+  }
+
+  private var recorderDelegate: RecorderDelegate?
+  private var recorder: AVAudioRecorder?
+
+  private class RecorderDelegate: NSObject, AVAudioRecorderDelegate {
+    private let callback: RecordingDoneCallback
+
+    init(callback: @escaping RecordingDoneCallback) {
+      self.callback = callback
+    }
+
+    func audioRecorderDidFinishRecording(
+      _ recorder: AVAudioRecorder,
+      successfully flag: Bool
+    ) {
+      let recordResult = RecordResult { () -> RecordingBufferAndData in
+        guard flag else {
+          throw AudioRecorderError.Error(message: "Recording was unsuccessful.")
+        }
+
+        let recordingUrl = recorder.url
+        let recordingFile = try AVAudioFile(forReading: recordingUrl)
+
+        guard
+          let format = AVAudioFormat(
+            commonFormat: .pcmFormatFloat32,
+            sampleRate: recordingFile.fileFormat.sampleRate,
+            channels: 1,
+            interleaved: false)
+        else {
+          throw AudioRecorderError.Error(message: "Failed to create audio format.")
+        }
+
+        guard
+          let recordingBuffer = AVAudioPCMBuffer(
+            pcmFormat: format,
+            frameCapacity: AVAudioFrameCount(recordingFile.length))
+        else {
+          throw AudioRecorderError.Error(message: "Failed to create audio buffer.")
+        }
+
+        try recordingFile.read(into: recordingBuffer)
+
+        guard let recordingFloatChannelData = recordingBuffer.floatChannelData else {
+          throw AudioRecorderError.Error(message: "Failed to get float channel data.")
+        }
+
+        let recordingData = Data(
+          bytesNoCopy: recordingFloatChannelData[0],
+          count: Int(recordingBuffer.frameLength) * MemoryLayout<Float>.size,
+          deallocator: .none)
+
+        return (recordingBuffer, recordingData)
+      }
+
+      callback(recordResult)
+    }
+
+    func audioRecorderEncodeErrorDidOccur(
+      _ recorder: AVAudioRecorder,
+      error: Error?
+    ) {
+      if let error = error {
+        callback(.failure(error))
+      } else {
+        callback(.failure(AudioRecorderError.Error(message: "Encoding was unsuccessful.")))
+      }
+    }
+  }
+}
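AudioRecorder exposes a single asynchronous entry point: record() requests microphone permission, captures ten seconds of 16 kHz mono audio to a temporary WAV file, and reports back through one completion callback. A minimal sketch of a caller that unpacks the result (hypothetical code, not part of this patch):

```swift
import Foundation

// Hypothetical caller, sketching AudioRecorder's callback contract.
let recorder = AudioRecorder()
recorder.record { result in
  switch result {
  case .success(let recording):
    // `recording.data` views the buffer's float32 samples without copying,
    // so the sample count is the byte count divided by the float32 size.
    let sampleCount = recording.data.count / MemoryLayout<Float>.size
    print("recorded \(sampleCount) samples")
  case .failure(let error):
    print("recording failed: \(error)")
  }
}
```

Note that ContentView below keeps the recorder (and the recognizer) as stored properties, so they outlive the asynchronous callback.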
diff --git a/mobile/examples/speech_recognition/ios/SpeechRecognition/ContentView.swift b/mobile/examples/speech_recognition/ios/SpeechRecognition/ContentView.swift
new file mode 100644
index 0000000000000..105bf935aecc4
--- /dev/null
+++ b/mobile/examples/speech_recognition/ios/SpeechRecognition/ContentView.swift
@@ -0,0 +1,61 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+import SwiftUI
+
+struct ContentView: View {
+  private let audioRecorder = AudioRecorder()
+  private let speechRecognizer = try! SpeechRecognizer()
+
+  @State private var message: String = ""
+  @State private var successful: Bool = true
+
+  @State private var readyToRecord: Bool = true
+
+  private func recordAndRecognize() {
+    audioRecorder.record { recordResult in
+      let recognizeResult = recordResult.flatMap { recordingBufferAndData in
+        return speechRecognizer.evaluate(inputData: recordingBufferAndData.data)
+      }
+      endRecordAndRecognize(recognizeResult)
+    }
+  }
+
+  private func endRecordAndRecognize(_ result: Result<String, Error>) {
+    DispatchQueue.main.async {
+      switch result {
+      case .success(let transcription):
+        message = transcription
+        successful = true
+      case .failure(let error):
+        message = "Error: \(error)"
+        successful = false
+      }
+      readyToRecord = true
+    }
+  }
+
+  var body: some View {
+    VStack {
+      Text("Press \"Record\", say something, and get recognized!")
+        .padding()
+
+      Button("Record") {
+        readyToRecord = false
+        recordAndRecognize()
+      }
+      .padding()
+      .disabled(!readyToRecord)
+
+      Text("\(message)")
+        .foregroundColor(successful ? .none : .red)
+        .padding()
+    }
+  }
+}
+
+struct ContentView_Previews: PreviewProvider {
+  static var previews: some View {
+    ContentView()
+  }
+}
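The detail worth noting in recordAndRecognize() is the `Result.flatMap` chain: a recording failure short-circuits recognition, so both failure paths funnel into the same endRecordAndRecognize() handler, which hops to the main queue before touching @State. A standalone illustration of that chaining (the names here are invented for the example):

```swift
import Foundation

// Two fallible steps, chained the same way recordAndRecognize() chains
// recording and recognition. If step1 fails, step2 never runs and the
// original error propagates unchanged.
func step1() -> Result<Data, Error> { .success(Data([1, 2, 3])) }
func step2(_ data: Data) -> Result<String, Error> { .success("\(data.count) bytes") }

let outcome: Result<String, Error> = step1().flatMap { step2($0) }
print(outcome)  // success("3 bytes")
```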
diff --git a/mobile/examples/speech_recognition/ios/SpeechRecognition/Info.plist b/mobile/examples/speech_recognition/ios/SpeechRecognition/Info.plist
new file mode 100644
index 0000000000000..b4f95e235e355
--- /dev/null
+++ b/mobile/examples/speech_recognition/ios/SpeechRecognition/Info.plist
@@ -0,0 +1,52 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+	<key>CFBundleDevelopmentRegion</key>
+	<string>$(DEVELOPMENT_LANGUAGE)</string>
+	<key>CFBundleExecutable</key>
+	<string>$(EXECUTABLE_NAME)</string>
+	<key>CFBundleIdentifier</key>
+	<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
+	<key>CFBundleInfoDictionaryVersion</key>
+	<string>6.0</string>
+	<key>CFBundleName</key>
+	<string>$(PRODUCT_NAME)</string>
+	<key>CFBundlePackageType</key>
+	<string>$(PRODUCT_BUNDLE_PACKAGE_TYPE)</string>
+	<key>CFBundleShortVersionString</key>
+	<string>1.0</string>
+	<key>CFBundleVersion</key>
+	<string>1</string>
+	<key>LSRequiresIPhoneOS</key>
+	<true/>
+	<key>UIApplicationSceneManifest</key>
+	<dict>
+		<key>UIApplicationSupportsMultipleScenes</key>
+		<false/>
+	</dict>
+	<key>UIApplicationSupportsIndirectInputEvents</key>
+	<true/>
+	<key>UILaunchScreen</key>
+	<dict/>
+	<key>UIRequiredDeviceCapabilities</key>
+	<array>
+		<string>armv7</string>
+	</array>
+	<key>UISupportedInterfaceOrientations</key>
+	<array>
+		<string>UIInterfaceOrientationPortrait</string>
+		<string>UIInterfaceOrientationLandscapeLeft</string>
+		<string>UIInterfaceOrientationLandscapeRight</string>
+	</array>
+	<key>UISupportedInterfaceOrientations~ipad</key>
+	<array>
+		<string>UIInterfaceOrientationPortrait</string>
+		<string>UIInterfaceOrientationPortraitUpsideDown</string>
+		<string>UIInterfaceOrientationLandscapeLeft</string>
+		<string>UIInterfaceOrientationLandscapeRight</string>
+	</array>
+	<key>NSMicrophoneUsageDescription</key>
+	<string>Audio is recorded for speech recognition.</string>
+</dict>
+</plist>
diff --git a/mobile/examples/speech_recognition/ios/SpeechRecognition/Preview Content/Preview Assets.xcassets/Contents.json b/mobile/examples/speech_recognition/ios/SpeechRecognition/Preview Content/Preview Assets.xcassets/Contents.json
new file mode 100644
index 0000000000000..73c00596a7fca
--- /dev/null
+++ b/mobile/examples/speech_recognition/ios/SpeechRecognition/Preview Content/Preview Assets.xcassets/Contents.json
@@ -0,0 +1,6 @@
+{
+  "info" : {
+    "author" : "xcode",
+    "version" : 1
+  }
+}
diff --git a/mobile/examples/speech_recognition/ios/SpeechRecognition/SpeechRecognition-Bridging-Header.h b/mobile/examples/speech_recognition/ios/SpeechRecognition/SpeechRecognition-Bridging-Header.h
new file mode 100644
index 0000000000000..021bfc4d277e7
--- /dev/null
+++ b/mobile/examples/speech_recognition/ios/SpeechRecognition/SpeechRecognition-Bridging-Header.h
@@ -0,0 +1,4 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+#import <onnxruntime.h>
diff --git a/mobile/examples/speech_recognition/ios/SpeechRecognition/SpeechRecognitionApp.swift b/mobile/examples/speech_recognition/ios/SpeechRecognition/SpeechRecognitionApp.swift
new file mode 100644
index 0000000000000..1a00df92b56d5
--- /dev/null
+++ b/mobile/examples/speech_recognition/ios/SpeechRecognition/SpeechRecognitionApp.swift
@@ -0,0 +1,13 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+import SwiftUI
+
+@main
+struct SpeechRecognitionApp: App {
+  var body: some Scene {
+    WindowGroup {
+      ContentView()
+    }
+  }
+}
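The bridging header above is what makes the ORT Objective-C API (ORTEnv, ORTSession, ORTValue) visible to Swift. The recognizer that follows feeds the session a float32 tensor of shape [1, sampleCount]; with the recorder's fixed format the sizes work out as below (a back-of-envelope sketch, with constants mirroring AudioRecorder.swift):

```swift
import Foundation

// Expected input size for a full-length recording: 10 s at 16 kHz mono float32.
let sampleRate = 16_000                                   // kSampleRate
let seconds = 10                                          // kRecordingDuration
let sampleCount = sampleRate * seconds                    // 160_000 samples
let byteCount = sampleCount * MemoryLayout<Float>.stride  // 640_000 bytes
// evaluate() derives the second dimension from the Data length, giving [1, 160_000].
let inputShape: [NSNumber] = [1, NSNumber(value: byteCount / MemoryLayout<Float>.stride)]
print(inputShape)  // [1, 160000]
```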
diff --git a/mobile/examples/speech_recognition/ios/SpeechRecognition/SpeechRecognizer.swift b/mobile/examples/speech_recognition/ios/SpeechRecognition/SpeechRecognizer.swift
new file mode 100644
index 0000000000000..799c46f9270a0
--- /dev/null
+++ b/mobile/examples/speech_recognition/ios/SpeechRecognition/SpeechRecognizer.swift
@@ -0,0 +1,97 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+import Foundation
+
+// these labels correspond to the model's output values
+// the labels and postprocessing logic were copied and adapted from:
+// https://github.com/pytorch/ios-demo-app/blob/f2b9aa196821c136d3299b99c5dd592de1fa1776/SpeechRecognition/create_wav2vec2.py#L10
+private let kLabels = [
+  "<s>", "<pad>", "</s>", "<unk>", "|", "E", "T", "A", "O", "N", "I", "H", "S", "R", "D", "L", "U", "M", "W", "C", "F",
+  "G", "Y", "P", "B", "V", "K", "'", "X", "J", "Q", "Z",
+]
+
+class SpeechRecognizer {
+  private let ortEnv: ORTEnv
+  private let ortSession: ORTSession
+
+  enum SpeechRecognizerError: Error {
+    case Error(_ message: String)
+  }
+
+  init() throws {
+    ortEnv = try ORTEnv(loggingLevel: ORTLoggingLevel.warning)
+    guard let modelPath = Bundle.main.path(forResource: "wav2vec2-base-960h.all", ofType: "ort") else {
+      throw SpeechRecognizerError.Error("Failed to find model file.")
+    }
+    ortSession = try ORTSession(env: ortEnv, modelPath: modelPath, sessionOptions: nil)
+  }
+
+  private func postprocess(modelOutput: UnsafeBufferPointer<Float>) -> String {
+    func maxIndex<S>(_ values: S) -> Int? where S: Sequence, S.Element == Float {
+      var max: (idx: Int, value: Float)?
+      for (idx, value) in values.enumerated() {
+        if max == nil || value > max!.value {
+          max = (idx, value)
+        }
+      }
+      return max?.idx
+    }
+
+    func labelIndexToOutput(_ index: Int) -> String {
+      if index == 4 {
+        return " "
+      } else if index > 4 && index < kLabels.count {
+        return kLabels[index]
+      }
+      return ""
+    }
+
+    precondition(modelOutput.count % kLabels.count == 0)
+    let n = modelOutput.count / kLabels.count
+    var resultLabelIndices: [Int] = []
+
+    for i in 0..<n {
+      let labelValues = modelOutput[(i * kLabels.count)..<((i + 1) * kLabels.count)]
+      if let labelIndex = maxIndex(labelValues) {
+        // skip consecutive duplicate label indices
+        if resultLabelIndices.last != labelIndex {
+          resultLabelIndices.append(labelIndex)
+        }
+      }
+    }
+
+    return resultLabelIndices.map(labelIndexToOutput).joined()
+  }
+
+  func evaluate(inputData: Data) -> Result<String, Error> {
+    return Result { () -> String in
+      let inputShape: [NSNumber] = [1, inputData.count / MemoryLayout<Float>.stride as NSNumber]
+      let input = try ORTValue(
+        tensorData: NSMutableData(data: inputData),
+        elementType: ORTTensorElementDataType.float,
+        shape: inputShape)
+
+      let startTime = DispatchTime.now()
+      let outputs = try ortSession.run(
+        withInputs: ["input": input],
+        outputNames: ["output"],
+        runOptions: nil)
+      let endTime = DispatchTime.now()
+      print("ORT session run time: \(Float(endTime.uptimeNanoseconds - startTime.uptimeNanoseconds) / 1.0e6) ms")
+
+      guard let output = outputs["output"] else {
+        throw SpeechRecognizerError.Error("Failed to get model output.")
+      }
+
+      let outputData = try output.tensorData() as Data
+      let result = outputData.withUnsafeBytes { (buffer: UnsafeRawBufferPointer) -> String in
+        let floatBuffer = buffer.bindMemory(to: Float.self)
+        return postprocess(modelOutput: floatBuffer)
+      }
+
+      print("result: '\(result)'")
+      return result
+    }
+  }
+}
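postprocess() is a greedy, CTC-style decoder: take the argmax over the 32 labels for each output frame, drop consecutive duplicates, then map index 4 to a space and the special tokens (indices 0-3) to nothing. A standalone re-statement of those rules, handy for a quick mental test (an illustrative helper, not code from the patch; it assumes the duplicate-collapsing loop as reconstructed above):

```swift
// Miniature version of the decoding in SpeechRecognizer.postprocess().
let labels = [
  "<s>", "<pad>", "</s>", "<unk>", "|", "E", "T", "A", "O", "N", "I", "H", "S", "R", "D", "L", "U", "M", "W", "C", "F",
  "G", "Y", "P", "B", "V", "K", "'", "X", "J", "Q", "Z",
]

func decode(_ frameArgmaxes: [Int]) -> String {
  var collapsed: [Int] = []
  for index in frameArgmaxes where collapsed.last != index {
    collapsed.append(index)  // drop consecutive duplicates
  }
  return collapsed.map { index in
    index == 4 ? " " : (index > 4 && index < labels.count ? labels[index] : "")
  }.joined()
}

print(decode([11, 11, 10]))       // "HI": the repeated 11 collapses
print(decode([6, 11, 5, 4, 10]))  // "THE I": index 4 becomes a space
```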
diff --git a/mobile/examples/speech_recognition/ios/SpeechRecognitionTests/Info.plist b/mobile/examples/speech_recognition/ios/SpeechRecognitionTests/Info.plist
new file mode 100644
index 0000000000000..64d65ca495770
--- /dev/null
+++ b/mobile/examples/speech_recognition/ios/SpeechRecognitionTests/Info.plist
@@ -0,0 +1,22 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+	<key>CFBundleDevelopmentRegion</key>
+	<string>$(DEVELOPMENT_LANGUAGE)</string>
+	<key>CFBundleExecutable</key>
+	<string>$(EXECUTABLE_NAME)</string>
+	<key>CFBundleIdentifier</key>
+	<string>$(PRODUCT_BUNDLE_IDENTIFIER)</string>
+	<key>CFBundleInfoDictionaryVersion</key>
+	<string>6.0</string>
+	<key>CFBundleName</key>
+	<string>$(PRODUCT_NAME)</string>
+	<key>CFBundlePackageType</key>
+	<string>$(PRODUCT_BUNDLE_PACKAGE_TYPE)</string>
+	<key>CFBundleShortVersionString</key>
+	<string>1.0</string>
+	<key>CFBundleVersion</key>
+	<string>1</string>
+</dict>
+</plist>
diff --git a/mobile/examples/speech_recognition/ios/SpeechRecognitionTests/SpeechRecognitionTests.swift b/mobile/examples/speech_recognition/ios/SpeechRecognitionTests/SpeechRecognitionTests.swift
new file mode 100644
index 0000000000000..01fda60d1c1ab
--- /dev/null
+++ b/mobile/examples/speech_recognition/ios/SpeechRecognitionTests/SpeechRecognitionTests.swift
@@ -0,0 +1,14 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+import XCTest
+
+@testable import SpeechRecognition
+
+class SpeechRecognitionTests: XCTestCase {
+  func testModelLoadsAndRuns() throws {
+    let recognizer = try SpeechRecognizer()
+    let dummyData = Data(count: 16000 * MemoryLayout<Float>.size)
+    _ = try recognizer.evaluate(inputData: dummyData).get()
+  }
+}
diff --git a/mobile/examples/speech_recognition/ios/images/screenshot.png b/mobile/examples/speech_recognition/ios/images/screenshot.png
new file mode 100644
index 0000000000000000000000000000000000000000..c1fce9f9e66889ba21ac93ecc041d154e9c98951
GIT binary patch
literal 34359
[binary PNG data for images/screenshot.png (app screenshot, 34359 bytes) omitted]
z3olU}xt*EIlUC151<`7F@eKA3bsIR*gP7=(|BL7oFBXMS36?Z-=_sp zykFLrluIhWe1N=p_c;qmEd0Iu(mKmpx7fYfqrE@P_95d_%%#`94ccL%2Us0FI`oX> zirS+DKWb_&`rcL&gG+>5|L@a8wgxa2(X20fP;TR$lWcFVf`_ZBsKaV(B2-Wz_4oep zhsV(IKxa8E6kU!}@vA-H%S3*^O-^ZUmE+q3OZSUvQxss#<2SyJucvExVo8s`>2<05 zXPCTq@iVs69@H1vlREKf5}$gXBP&;>bZs<##e6MluOpce+jEW|*P6SFbiFl?cTI@? z8QE$Uxi|8PIg5sZ%f^1XiL@F}=w1*_`&{O>rX#%aMJXme zyj*h8$IJb)RgGA!-Ym`Pn|q9$QnET1yF+y}LG3RLQ@wS==+2(U+x4aLbU8bTjxe%8 zj9f3h2rmCfxOAJDP)}RXScTyV!zh+gYTs1;?vbP**h@pLP|3wf7WI{cEJurQEuxw` zKXy_jkSH_${!ul#AqMSX5w>9eK~#Na0edww#%SYR7HuWd`JmRO&@YCeXGY%Xh=dvi z@TPi=d;0#eH>9BVG>bTR$>N%XL z8&j&{KfXNBt(0D&IX9BxX(KC@(>8XSnmnXYu6bQ(?ze7cx;W3f{IJ_4tW>MC$YTCk zrk?FkoptO(!A0>)`CQ4jY3IKr>!Xt+sNOY9T*_bczKb_eS)EH;G%a5mi>sk{JKVu+ zbEplyX$14Q&~`g2>>~UgGso>5Ym~Kxgm%=QVp?}@c~3`MMlS64Ki%2<7)X{nQMT>Y z!sKI<^!^z7kS~IV`&_JZ3b&~dT9Gxn6Y_@n4b!umsruevm@M4=E71=^LY zr2snMHYK|Ru~G&Np3Btt1vA#{9}c2n&h`A~UwBjS4pa8wDDTR7fTRK-ci)SJ_^dS+ zIYwz;J0H!JAL}(O-##1LUN&&}`@^qr!-QcEn_4Ir@#c3|u>8;+uW6U(p)(kkm%TBa zzal3+1v{R+zbtg@Y<8>krp@H+9&0)kux?n(hgy)Bvw1!k6xplW9L66J$$_l7EPrN*K#8_O;ATOCV zmyzD~IfLKMU@g?dDcsaMT~m6gTjk;Mi>;{UCDyIqEZ#%PLhQ$X7ou4~I*fU64)sJx ztnv)}uZ9+A>ZTK{7rGw-nNnvg=T)`3qqxMF^Q35r2%!}pHl(h8h~8qn1=n#_b74o1;PF8I%id|Fz z#}6rEr%hkH5pGtNGhC6-L-!?NWJTR{bhJ;=a);zB{xW203n3JmTx)Gy^9&MRnnc0q zWkBX;ywG{J%!{r^FSJ*`u5rqdon?GoGNT^8*ZytSUDG+H=R=CeLgjNNl^3b8Ks8c8 z^qbr<6$L2d4)!twWc(VRB7d4}t{KIkP+s`rPX_sLWwTjM?~BjRAj=m~q_-lF3Q8^r zgs4rSP!Jt7r=xw-_8pDAx{2g7H31J&k<;9i5 zHl2!Ywh=Tc{KQp})pBTazcAXVD3bZ!#ii->#U4Jo=AU2-?eVlUL(I-9< zZ~0bq`(3?L4K*gG+zU^rBci&ps~iytivd>rMCdNECNVze&H=goIgBkWNE_X3{jYJd7pUCn_C{+t>p zc`v#eTd%gieaF*Am+kMpB=pM3C+54)R5A>;^Kjoue;z|*<;S40lFYfwg+c}n3+6Hb zPcKoYc98lt#l>II{=3(q{*3Kngc|oCL&7RMG;2rJc>v`-Q&Q<-f30Q2a|wfXaRc8E zyPtXJgONy@P*wQS%9y#2S7|7Q;P{+sB&rXSf*bG;x<1+FRdUvX`yXtyj`_?~2%{sF@^s!Vc{TKghw`^EUfB`Dq|=Yc2E#e23j{KuSZ3wF}SjgizjGXCn+IAxQbl;PoL z-wN8dDH@yeV-?uk1;KUxzdT=1jVx&^XZ!W_HEO~h(5;R_)4Lv62ck6nHZOaxjEeaV z)u=Y5W-WlFPIxR4F6qFG$V1m- zEtopUi=JXu<3A+~l#&cx@DhAy12Ne`?d4apzOaxiErY5!p&8a@;1v{Jtxk9Q0pVJG zWU|^!jT}gu$p~X}mLML!pe^5Vz7=>6b8`dB2L~}E`DJfAT54jQL&-4Z1sXY!E$Y*{ zD!bQhb;Hv&jaBx9!?%xvLC$CLeNwjLNqzTIa^fh@z19+&UW#ANS8X>fA2D$kXX#F$ zjpN1h9oRYZs=m?iHyt1T8Gg&)HPro-9dA;2nNg4ZRmoL-TYby$iQgogew6+}e~JM0 zYM5wB6nxG1d2!*Uqv4;M6)S3i6z$&)lFKTa|te@JhZQT$a0dt6nZ7o8M?QPLtPLSV3d5l9I@M5DcO>|VWhwm`73 zIX)>^0pf*IIWhCJj{#9-j`@mvYKBx3r3_QkcsI}&Dg(u?yY`il~c|3!gf=Lw&7O-|Z>OZ4YU zPh0yHT(9tMTr=V?`8aPd;ZgcZ>aP5+-<$CbV*490*OYd?vD%NMrjxfhVwwi36L)Fy zDvn3`fkUeAjab(JPC(G^-IserG_;8Ab$WRI2$q*2chWtPl9ljGrW!YqHsO2{go|u} z#(R(Hdmh`Cu=vctmo@Dz&ptdmON!SNI0Hmn#8H%p_6d?_k^+TTB=({3dt?%=k&S9V z#pmH}$0wgRr51v$7A?Cg1yF%aG#Rs!<{7hV!XXC%%_m8Ryb*qT85;o#;`nw6Q??JL zFMu-$w1GU0E--6k3}^{vq6_u7BkiK{a&2h5nz?J#cO}2rdI9CXy62j0>*s%Wb175# z)v(18YO(aqP{3emAwQ|A5riWm5$&NL9;f-2H(ckA__9D^v3?&lJ63yyF_*R`yhm z5EvFDa|>~Xq_$i1DVT*6Fsred$_rUAN2399K*5sO%Y`oPlwt9o$ziV2MJT{U!@Azt zDLrIoXTXgopu=16sJ*snd=?UE+I16Tu`u~H2JoQu6uZJAKwB1g`YH;PKWw&$Euvz{ zU;YA-Bo5mxh9qb0>LH{Q5&sNaM@nZH1?SZIZJN=(AMq$3-fYCpP*xXxIC$4c5~YsY zuR!}IP1uj{l9!DwnG>BY_ItRx(9&sYB|HdUb>_#=ULJ_-Y9^P@U>VeD0)6TlXOvm@74wCDT4^ef6oKbas${z+}RuMAkd@apfZa>~F z7U?!0GCHLk-N_-TRvkX@E(67mMAeOIC=h(IVT(bnLeRSPjE~4J<_2yNaAYhM*LWmf zg+iyH;gsP=B*&ah!byNnK1tnboLwa33yzfY%x#F92~$c18|4&V%o(^c$gzVloCD!| z`ifvb5f8z|e>?Po8g_W4EvESvJ6>+@bgDQavEl7qeP1}dlY-H4LX_}MH(-h#a*@nj z?)4Dw8yn2b9Vo7i7w~{_(2>fH@Q(JpVWeyNl+*&+;$=bAw&w#2=K^nZ$@_`O)l9>( zRUOw}OSk@|+emZd3~V^K_m59`Lhm^4V4p<$G0M#QGbg4vAQR@HD}BYH>}Dxv$)_=t z$hZK1BV?!TI5}*g%x?MVei!p!7>?7ec9C^_A0bn1b@(J8DQk}5YiHcW8-qU<;w)!% zRRZ{gWO}%B_VJQ162DfV+Vz~hP8mvJ_yX;B)Ho$|<0-N` 
z30-BGWbrp;_j%0X27C7~X=F8}X2C_KMxIIY@Xx36FA|Het!mp+=Ng%D&MhXzKcSb^ z=W3~Yv5nOlYL7mn$Ed+0VqMogB;v7mX2Z#xQ7p)Xf*4x!-sCy(s`*HO*5?u4>VNm> z!=6fwd~uWtw!roDZSBz9RU?k;!REzo^C@U%sWDkEcFoiL7Zz~}| zD9BuQ3JQt&{dnem0giW_3pY^4$mu^4AlE`#DeB%`>Az&|7LbI>#50eruK<|A%;(|2yCvDkQkVzGu zQ%3Bi)nC;p_!-!dA8+q~^}JVVGb$hui$fB4#1+E0(`%rP)AJu6_8WBHx8DuOTQ9pX;6n_i9$jvN&6ufAaD~ygy?smxM!0VFrK=VAyQXdK?-X`{Tlo@g= z4600W;CVQ4IY7p4;*j!0KT2iz=d%70 zt^;xgxJ1;aPX~A}QGPo7tQstuDt7>j4*FC+Tu=JNO6G&^-k}mRQ8z}n8C=0HrUW+> zS$xzwrZw9AnA4%oqnK3W|3{b#{Wt~nK|*l=BKL%BJo=u2NGKjoXO1+Y3k^rumX?E6 z)so@-rWP3XlLw?*TaU!>N!~{+$s?@-+KU2e8L@JHJs;WHRtUxG?_;qFOJlhy2!z(< z^Ll?tn)?@Tb3^jmx)l8l%bSI|-(D|JHOt$JH*5y^y%cQ25Hema_J#VNl+q&A=Ba77 zKMKrC#}GfyS(qJ1|_lMJIsct{0O>daU$y*>T>x!MWpJA@1qbAu6rdD!}F{ zxoE6CI+3-nZw|w&h|TEKZayHwN7}f|<#gLhp{cPKwl?tsL&*rY@ye3E&o5n^8;Kfa zByd&Ir7!bxLhB90`XU$QDgQ) z&AzImu-;quKit7DSvd8wGh{3 zI(#khh>`;0|zBf>V;Tg7wF>LYm- z%sps8-tQ@Cf7r(g%nHLI^e66@tOqt`!zSQ$r*4F6z^?Vdg#xg3)I|)I)~6RxvLHN5 z1h15O4PV#9#odW2=9_DnVf6ma7t19%?+NlEKv}P_r-t(O7@W3q1zDT$dDwz(V`If~RkylSNT7Utv{UTM%T`RR4jTY7x&0nE+N+7Z~;gn1W7La2#eSVt7Cn_^8 z06R&1*>3y$s#t8)4^A;GWxe&Ba2@5(X|?nG)%B@0f0#(a_I`MU@|Kob`hA}ECrs=+ zoo3Fik4C@WmsQn}xyY8(e1Xe&yUa{GN~uyWRqc%A<6iq|1DOA;XIi!MW z&y=KPIJlzH-}Q@s_DUNPxL3Zr_KQ*2{e68$+e87GLN4;LjM>-o%9;BX;HSG!geOjh zQr<8{xiqD~h`;ap-p?EJz%kKs^b3Z1F z$h(j$@NiSan{b+;8+pr+3h&vEa+X9@yiGO%gPm^r;`-0vH2Y=cnRUpX_wi1(x`O)wr30 zdo9z<`m(HQ_Ed^bNs1MN;PkZxOM&Ws{~!qd#>2j#V83m)Nn0kbxB%(rgfx6!^T}>s zodrtgIA{TO+COEvMi{0`Ms9AJ{>r;0G%dxxA(T7_AsAEy?yjFy9S1@Ba<8P}y5*v{ z*96rd$d3*dO($}z;0e!p>82Ttq0wd=t)Se&(I31!SNOS43r|NFpz7E{(8z#Ik7@5qy&6RP2Iu5&qOmPeA~! zY(bJJy%t5I{?xDY^Ne=?cEM&7c3ZO__Dzy3Do}&bE$GzB5hl}IBM^DH-K*KrD@q0{ z)#^9pb<>hfyYcw(Y(JYf8vicCspVI2s$yUeqR%oZ=-4W_)VxT3XD|@&5h-rqDW%+* zU)_~1e|&-~aI9F-05}rG%^+9tCQM#<8qrqH-q^w?a|s;zgy8hejtn4 zA+lRsmmN{l6QmhWQKdULE>m5J)q94zZVBJ%IOX?8`U@}gUt6#2N3-YUbrDfc5_57< ziAaG2@obqLo%v2VHD9I&y~BOU+MvEiJR3F{mOEH?L*cxBxzF&^AiR{zY)C-Zzl~Ca z!~EWjo{ zo8edF*Ieq;wdhIz`0rt}LbQ}aQ<7OD9GKD1FIF;b0e3lJ;r%)%?jOG`i)>+{-d*I+00tC&mDx=`>*2a#xn-jOF=z8$LL&4 zrpIh7DZqt$89<`rK z+LzK$R1uIvB(4$v`CwlZnEx8YBq4Y3+^d3ai|Q@*O&iEF9Y}oxh`CT(KGniPaPkT zMd#eka^%;J57)>46$Y_s3pyMw)%Mi=Fu=lggCLeE3hbH3CzJ2$2MNs4e0wKM1oXUY z|3v7s#SFI?oWKg7MBed7S3LPJC!}@{erGx7Oz@ceO)+l`w&fKqhU5N9@G96#^L0}E z7q!S;JnagxXLC?rB0Jk|)`^e4%58}o9H_~&i&fTaraA>$bRB9QWLe)Mxw|ncvFvRM z!P?A84-(2I2oza_B-osBZj$`HA&Nle zUV0Q=ljolUTsTF9+q+CW`MTa9IgcNhe0P5jzEn$)*_aKv9|qk;X%2p#`5IWe7*aQp z<~!M2yxh{2L%-yDdaOrkK6FoNs;ud$u@uN~E&6AY<*yLP87&dwq!=h9KdUWJX_xEp zw8drQtgzPKvM9;d9~vobFUOuHfEx4;4Bne_&6B0`!XGz_?`gqN9qCTE@7f0{Q}S2| zk1kx$spy7yOHIz1?x{*I=74kKUytX3_ukn~Z?0$Lgg9?*ejKGof6wJ=_y@`>C2wipk(sQTH9B^0oQ103Lt5cYiKLL3@(kzinz)SK!fec&guz&ef>H5#PGZqjgk z1GaDRi?fYWDOBl}`pDN$x}g`s>@FcmRaCT8z&a*MNY~pwz~2edB^0Fjymtq7Watvo z`%MWQhtjKzQ0}I3m2eecoqGudrYbqW-}PPLCfcz+VENxJ{_iaQuPy%XcKQF>=|o!^ v!7p66)T!}A`T76)$p1ZQ{y#qq|6L>XF~z02@lsp>ei~18pHw`4_4fY&HJU$e literal 0 HcmV?d00001 diff --git a/mobile/examples/speech_recognition/ios/readme.md b/mobile/examples/speech_recognition/ios/readme.md new file mode 100644 index 0000000000000..b2c858fa0b8dd --- /dev/null +++ b/mobile/examples/speech_recognition/ios/readme.md @@ -0,0 +1,41 @@ +# iOS Speech Recognition Example + +This example shows how to use ORT to do speech recognition using the [Wav2Vec 2.0](https://huggingface.co/transformers/model_doc/wav2vec2.html) model. + +It is heavily inspired by [this PyTorch example](https://github.com/pytorch/ios-demo-app/tree/f2b9aa196821c136d3299b99c5dd592de1fa1776/SpeechRecognition). 
+
+The application lets the user make an audio recording, then recognizes the speech from that recording and displays a transcript.
+
+![Screenshot](images/screenshot.png)
+
+## Set up
+
+### Prerequisites
+
+See the general prerequisites [here](../../../README.md#General-Prerequisites).
+
+Additionally, you will need to be able to record audio, either on a simulator or on a device.
+
+### Generate the model
+
+The model should be generated in this location: `<this directory>/SpeechRecognition/model`
+
+See the instructions [here](../model/readme.md) for how to generate the model.
+
+For example, with the model generation script's dependencies installed, run the following from this directory:
+
+```bash
+../model/gen_model.sh ./SpeechRecognition/model
+```
+
+### Install the Pod dependencies
+
+From this directory, run:
+
+```bash
+pod install
+```
+
+## Build and run
+
+Open the generated SpeechRecognition.xcworkspace file in Xcode to build and run the example.
diff --git a/mobile/examples/speech_recognition/model/gen_model.sh b/mobile/examples/speech_recognition/model/gen_model.sh
new file mode 100755
index 0000000000000..368b6d28adf36
--- /dev/null
+++ b/mobile/examples/speech_recognition/model/gen_model.sh
@@ -0,0 +1,14 @@
+#!/bin/bash
+
+set -e
+
+OUTPUT_DIR=${1:?"Please specify an output directory."}
+
+# Get the directory this script is in.
+DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+
+mkdir -p "${OUTPUT_DIR}"
+cd "${OUTPUT_DIR}"
+
+# Export the PyTorch model to ONNX, then convert it to ORT format.
+python3 "${DIR}/wav2vec2_gen.py"
+python3 -m onnxruntime.tools.convert_onnx_models_to_ort .
diff --git a/mobile/examples/speech_recognition/model/readme.md b/mobile/examples/speech_recognition/model/readme.md
new file mode 100644
index 0000000000000..852d439b3c315
--- /dev/null
+++ b/mobile/examples/speech_recognition/model/readme.md
@@ -0,0 +1,31 @@
+# Wav2Vec 2.0
+
+This example uses the [Wav2Vec 2.0](https://huggingface.co/transformers/model_doc/wav2vec2.html) model for speech recognition.
+
+The model generation script was adapted from [this PyTorch example script](https://github.com/pytorch/ios-demo-app/blob/f2b9aa196821c136d3299b99c5dd592de1fa1776/SpeechRecognition/create_wav2vec2.py).
+
+## How to generate the model
+
+### Install the Python requirements
+
+It is a good idea to use a separate Python environment instead of the system Python, e.g., a new Conda environment.
+
+Run:
+
+```bash
+python3 -m pip install -r <this directory>/requirements.txt
+```
+
+### Run the model generation script
+
+Run:
+
+```bash
+<this directory>/gen_model.sh <output directory>
+```
+
+The model will be generated in the given output directory.
+
+In particular, .onnx and .ort model files will be generated.
+The .ort model file can be used by ONNX Runtime Mobile.
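+
+As an optional sanity check, you can run the generated .onnx model on a dummy
+input with the `onnxruntime` Python package from requirements.txt (a minimal
+sketch, assuming the script's default output file name `wav2vec2-base-960h.onnx`):
+
+```python
+import numpy as np
+import onnxruntime
+
+# Run the exported model on one second of silence (16 kHz mono samples).
+session = onnxruntime.InferenceSession("wav2vec2-base-960h.onnx")
+(logits,) = session.run(None, {"input": np.zeros((1, 16000), dtype=np.float32)})
+print(logits.shape)  # e.g. (1, <number of frames>, <vocabulary size>)
+```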
diff --git a/mobile/examples/speech_recognition/model/requirements.txt b/mobile/examples/speech_recognition/model/requirements.txt
new file mode 100644
index 0000000000000..e65de2c8151b9
--- /dev/null
+++ b/mobile/examples/speech_recognition/model/requirements.txt
@@ -0,0 +1,5 @@
+onnx>=1.9.0
+onnxruntime>=1.8.0
+torch==1.9.0
+torchaudio==0.9.0
+transformers==4.6.1
diff --git a/mobile/examples/speech_recognition/model/wav2vec2_gen.py b/mobile/examples/speech_recognition/model/wav2vec2_gen.py
new file mode 100644
index 0000000000000..c92704c345e06
--- /dev/null
+++ b/mobile/examples/speech_recognition/model/wav2vec2_gen.py
@@ -0,0 +1,22 @@
+# This script was adapted from:
+# https://github.com/pytorch/ios-demo-app/blob/f2b9aa196821c136d3299b99c5dd592de1fa1776/SpeechRecognition/create_wav2vec2.py
+
+import torch
+from torchaudio.models.wav2vec2.utils.import_huggingface import import_huggingface_model
+from transformers import Wav2Vec2ForCTC
+
+# Load the pretrained Wav2Vec 2.0 model from the Hugging Face Hub.
+model = Wav2Vec2ForCTC.from_pretrained("facebook/wav2vec2-base-960h")
+
+# Convert the model to torchaudio format.
+model = import_huggingface_model(model)
+
+model = model.eval()
+
+# Dummy input: a batch containing a single clip of 1024 audio samples.
+# Named dummy_input to avoid shadowing the built-in input().
+dummy_input = torch.zeros(1, 1024)
+
+# Export to ONNX. Dimension 1 (the audio length) is dynamic, so the exported
+# model accepts recordings of arbitrary length at inference time.
+torch.onnx.export(
+    model,
+    dummy_input,
+    "wav2vec2-base-960h.onnx",
+    input_names=["input"],
+    output_names=["output"],
+    dynamic_axes={"input": [1], "output": [1]},
+    opset_version=13,
+)
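+
+# Optional sanity check (not part of the original script; uses the onnx
+# package from requirements.txt): verify the exported model is well formed.
+import onnx
+
+onnx.checker.check_model(onnx.load("wav2vec2-base-960h.onnx"))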