#!/bin/bash
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

set -exu
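
# Usage (inferred from the positional defaults below; the script name is
# illustrative): bash build_phi_3_mini.sh [BUILD_TYPE] [UNUSED] [BUILD_DIR]
# Note that $2 is accepted but unused here, presumably reserved for the caller.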
BUILD_TYPE=${1:-Debug}
BUILD_DIR=${3:-cmake-out}
MODEL_DIR=examples/models/phi-3-mini

echo "Building with BUILD_TYPE: ${BUILD_TYPE}, BUILD_DIR: ${BUILD_DIR}"

if [[ -z "${PYTHON_EXECUTABLE:-}" ]]; then
    PYTHON_EXECUTABLE=python3
fi

# Number of processes for a parallel build
NPROC=8
if hash nproc &> /dev/null; then NPROC=$(nproc); fi
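
# Configure and install the core ExecuTorch libraries into ${BUILD_DIR}, enabling
# the extension and kernel targets (data loader, module, tensor, XNNPACK, and the
# quantized/optimized/custom kernels) that the phi-3-mini runner links against.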
cmake_install_executorch_libraries() {
    # Use the resolved interpreter rather than a bare "python" so the build
    # matches the environment checked above.
    cmake -DPYTHON_EXECUTABLE="$PYTHON_EXECUTABLE" \
        -DCMAKE_INSTALL_PREFIX=${BUILD_DIR} \
        -DEXECUTORCH_ENABLE_LOGGING=1 \
        -DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
        -DEXECUTORCH_BUILD_EXTENSION_DATA_LOADER=ON \
        -DEXECUTORCH_BUILD_EXTENSION_MODULE=ON \
        -DEXECUTORCH_BUILD_EXTENSION_TENSOR=ON \
        -DEXECUTORCH_BUILD_XNNPACK=ON \
        -DEXECUTORCH_BUILD_KERNELS_QUANTIZED=ON \
        -DEXECUTORCH_BUILD_KERNELS_OPTIMIZED=ON \
        -DEXECUTORCH_BUILD_KERNELS_CUSTOM=ON \
        -B${BUILD_DIR} .

    cmake --build ${BUILD_DIR} -j${NPROC} --target install --config ${BUILD_TYPE}
}
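
# Configure and build the phi-3-mini runner example against the libraries
# installed above.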
cmake_build_phi_3_mini() {
    # Resolved site-packages path (not referenced by the cmake invocation below).
    python_lib=$($PYTHON_EXECUTABLE -c 'from distutils.sysconfig import get_python_lib; print(get_python_lib())')

    cmake -DPYTHON_EXECUTABLE=$PYTHON_EXECUTABLE \
        -DCMAKE_INSTALL_PREFIX=${BUILD_DIR} \
        -DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
        -DEXECUTORCH_BUILD_KERNELS_CUSTOM=ON \
        -DEXECUTORCH_BUILD_KERNELS_OPTIMIZED=ON \
        -DEXECUTORCH_BUILD_XNNPACK=ON \
        -DEXECUTORCH_BUILD_KERNELS_QUANTIZED=ON \
        -B${BUILD_DIR}/${MODEL_DIR} \
        ${MODEL_DIR}

    cmake --build ${BUILD_DIR}/${MODEL_DIR} -j${NPROC} --config ${BUILD_TYPE}
}

# Download and convert tokenizer.model
prepare_tokenizer() {
    echo "Downloading and converting tokenizer.model"
    wget -O tokenizer.model "https://huggingface.co/microsoft/Phi-3-mini-128k-instruct/resolve/main/tokenizer.model?download=true"
    $PYTHON_EXECUTABLE -m executorch.extension.llm.tokenizer.tokenizer -t tokenizer.model -o tokenizer.bin
}

# Export the phi-3-mini model to a .pte file
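# Flag meanings are assumptions based on the exporter's CLI: -c selects the
# context-length variant ("4k" here), -s the maximum sequence length, and -o
# the output .pte path.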
export_phi_3_mini() {
    echo "Exporting phi-3-mini. This will take a few minutes"
    $PYTHON_EXECUTABLE -m executorch.examples.models.phi-3-mini.export_phi-3-mini -c "4k" -s 128 -o phi-3-mini.pte
}
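
# Run the exported model through the runner with a fixed prompt and verify
# that the expected answer appears in the generated output.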
run_and_verify() {
    NOW=$(date +"%H:%M:%S")
    echo "Starting to run phi-3-mini runner at ${NOW}"
    if [[ ! -f "phi-3-mini.pte" ]]; then
        echo "Export failed. Abort"
        exit 1
    fi
    if [[ ! -f "tokenizer.bin" ]]; then
        echo "tokenizer.bin is missing."
        exit 1
    fi

    # Build the runner arguments as an array so the multi-word prompt survives
    # word splitting intact. The prompt uses the Phi-3 chat template markers.
    RUNTIME_ARGS=(
        --model_path=phi-3-mini.pte
        --tokenizer_path=tokenizer.bin
        --prompt="<|system|>You are a helpful assistant.<|end|><|user|>What is the capital of France?<|end|><|assistant|>"
        --temperature=0
        --seq_len=128
    )

    ${BUILD_DIR}/${MODEL_DIR}/phi_3_mini_runner "${RUNTIME_ARGS[@]}" > result.txt

    # Verify result.txt: pass if the expected answer appears anywhere in the output.
    RESULT=$(cat result.txt)
    EXPECTED_RESULT="The capital of France is Paris."
    if [[ "${RESULT}" == *"${EXPECTED_RESULT}"* ]]; then
        echo "Expected result: ${EXPECTED_RESULT}"
        echo "Actual result: ${RESULT}"
        echo "Success"
        exit 0
    else
        echo "Expected result: ${EXPECTED_RESULT}"
        echo "Actual result: ${RESULT}"
        echo "Failure; results not the same"
        exit 1
    fi
}

# Step 1. Build ExecuTorch and the phi-3-mini runner
cmake_install_executorch_libraries
cmake_build_phi_3_mini

# Step 2. Export the tokenizer and model
prepare_tokenizer
export_phi_3_mini

# Step 3. Run and verify the result
run_and_verify