|
#!/bin/bash
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

# Build ExecuTorch with the XNNPACK backend and the llama runner for Android.
#
# Usage: <script> [BUILD_TYPE] [ENABLE_KLEIDI]
#   BUILD_TYPE     CMake build type (default: Release)
#   ENABLE_KLEIDI  enable KleidiAI kernels in XNNPACK, ON/OFF (default: ON)
# Environment:
#   ANDROID_NDK    path to the Android NDK (default: /opt/ndk)

# -e exit on error, -x trace commands, -u error on unset variables;
# pipefail makes a pipeline fail if any stage fails (was missing).
set -exu
set -o pipefail

BUILD_TYPE=${1:-Release}
ENABLE_KLEIDI=${2:-ON}
NDK=${ANDROID_NDK:-/opt/ndk}
# Per-configuration output dir so different flavors don't clobber each other.
BUILD_DIR="cmake-android-out-${BUILD_TYPE}-Kleidi-${ENABLE_KLEIDI}"
#######################################
# Configure and install the ExecuTorch core libraries and the XNNPACK
# backend for Android (arm64-v8a).
# Globals:   NDK, BUILD_DIR, BUILD_TYPE, ENABLE_KLEIDI (read)
# Outputs:   installs headers/libraries under ${BUILD_DIR}
#######################################
install_executorch_and_backend_lib() {
  echo "Installing executorch and xnnpack backend"
  # local: don't leak the ABI choice into the caller's environment.
  local ANDROID_ABI=arm64-v8a
  # All variable expansions quoted so paths/values with spaces survive.
  cmake \
    -DCMAKE_TOOLCHAIN_FILE="${NDK}/build/cmake/android.toolchain.cmake" \
    -DEXECUTORCH_ENABLE_LOGGING=ON \
    -DANDROID_ABI="${ANDROID_ABI}" \
    -DANDROID_PLATFORM=android-23 \
    -DCMAKE_INSTALL_PREFIX="${BUILD_DIR}" \
    -DCMAKE_BUILD_TYPE="${BUILD_TYPE}" \
    -DEXECUTORCH_BUILD_EXTENSION_DATA_LOADER=ON \
    -DEXECUTORCH_BUILD_EXTENSION_MODULE=ON \
    -DEXECUTORCH_BUILD_EXTENSION_TENSOR=ON \
    -DEXECUTORCH_BUILD_XNNPACK=ON \
    -DEXECUTORCH_XNNPACK_ENABLE_KLEIDI="${ENABLE_KLEIDI}" \
    -DEXECUTORCH_XNNPACK_WORKSPACE_SHARING=ON \
    -DEXECUTORCH_BUILD_KERNELS_OPTIMIZED=ON \
    -DEXECUTORCH_BUILD_KERNELS_QUANTIZED=ON \
    -DEXECUTORCH_BUILD_KERNELS_CUSTOM=ON \
    -DXNNPACK_ENABLE_ARM_BF16=OFF \
    -B"${BUILD_DIR}" .

  cmake --build "${BUILD_DIR}" -j4 --target install --config "${BUILD_TYPE}"
}
#######################################
# Configure and build the llama2 example runner for Android (arm64-v8a),
# against the libraries installed by install_executorch_and_backend_lib.
# Globals:   NDK, BUILD_DIR, BUILD_TYPE, ENABLE_KLEIDI (read)
# Outputs:   runner binary under ${BUILD_DIR}/examples/models/llama2
#######################################
build_llama_runner() {
  echo "Building llama runner for Android..."
  # local: don't leak the ABI choice into the caller's environment.
  local ANDROID_ABI=arm64-v8a
  # All variable expansions quoted; one -D flag per line for readability
  # (was previously mis-indented with -DPYTHON_EXECUTABLE crammed inline).
  cmake \
    -DCMAKE_TOOLCHAIN_FILE="${NDK}/build/cmake/android.toolchain.cmake" \
    -DEXECUTORCH_ENABLE_LOGGING=ON \
    -DANDROID_ABI="${ANDROID_ABI}" \
    -DANDROID_PLATFORM=android-23 \
    -DCMAKE_INSTALL_PREFIX="${BUILD_DIR}" \
    -DCMAKE_BUILD_TYPE="${BUILD_TYPE}" \
    -DPYTHON_EXECUTABLE=python \
    -DEXECUTORCH_BUILD_XNNPACK=ON \
    -DEXECUTORCH_XNNPACK_ENABLE_KLEIDI="${ENABLE_KLEIDI}" \
    -DEXECUTORCH_XNNPACK_WORKSPACE_SHARING=ON \
    -DEXECUTORCH_BUILD_KERNELS_OPTIMIZED=ON \
    -DEXECUTORCH_BUILD_KERNELS_QUANTIZED=ON \
    -DEXECUTORCH_BUILD_KERNELS_CUSTOM=ON \
    -B"${BUILD_DIR}/examples/models/llama2" examples/models/llama2

  cmake --build "${BUILD_DIR}/examples/models/llama2" -j4 --config "${BUILD_TYPE}"
}
# Run the full pipeline: install the core + backend libraries first, then
# build the llama runner against them (order matters).
install_executorch_and_backend_lib
build_llama_runner
0 commit comments