diff --git a/runtime/android/README.md b/runtime/android/README.md new file mode 100644 index 0000000..3a4a191 --- /dev/null +++ b/runtime/android/README.md @@ -0,0 +1,27 @@ +# Usage + +Most AI engineers are not familiar with Android development, so this is a simple ‘how to’. + +1. Train your model with your data + +2. Export pytorch model to onnx model + +3. Convert onnx model for mobile deployment + +```bash +python -m onnxruntime.tools.convert_onnx_models_to_ort your-model.onnx +``` +you will get `your-model.ort` and `your-model.with_runtime_opt.ort` + + +4. Install Android Studio, open the path wekws/runtime/android, and build + +*NOTE:* The default feature_dim in the code is 40; if your model’s is 80, change it here `./app/src/main/cpp/wekws.cc` + +```C++ + feature_config = std::make_shared(40, 16000); // 40 -> 80 +``` + +It can also be built on Linux by running `bash ./gradlew build` + +5. Install `app/build/outputs/apk/debug/app-debug.apk` to your phone and try it. diff --git a/runtime/android/app/build.gradle b/runtime/android/app/build.gradle index 5984fbb..2aa287b 100644 --- a/runtime/android/app/build.gradle +++ b/runtime/android/app/build.gradle @@ -45,8 +45,8 @@ dependencies { implementation 'androidx.appcompat:appcompat:1.3.0' implementation 'com.google.android.material:material:1.4.0' implementation 'androidx.constraintlayout:constraintlayout:2.0.4' - implementation 'com.microsoft.onnxruntime:onnxruntime-mobile:latest.release' - extractForNativeBuild 'com.microsoft.onnxruntime:onnxruntime-mobile:latest.release' + implementation 'com.microsoft.onnxruntime:onnxruntime-mobile:1.12.1' + extractForNativeBuild 'com.microsoft.onnxruntime:onnxruntime-mobile:1.12.1' testImplementation 'junit:junit:4.13.2' androidTestImplementation 'androidx.test.ext:junit:1.1.3' androidTestImplementation 'androidx.test.espresso:espresso-core:3.4.0' @@ -70,4 +70,4 @@ tasks.whenTaskAdded { task -> if (task.name.contains('externalNativeBuild')) { 
task.dependsOn(extractAARForNativeBuild) } -} \ No newline at end of file +}