#!/bin/bash
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

# Build ExecuTorch + the phi-3-mini runner, export the model/tokenizer,
# then run a smoke test and verify the generated text.
set -exu

BUILD_TYPE=${1:-Debug}
# NOTE(review): BUILD_DIR intentionally(?) reads $3, skipping $2 — presumably
# $2 is reserved by the CI harness; confirm against callers before changing.
BUILD_DIR=${3:-cmake-out}
MODEL_DIR=examples/models/phi-3-mini

echo "Building with BUILD_TYPE: ${BUILD_TYPE}, BUILD_DIR: ${BUILD_DIR}"

# Fall back to python3 when the caller did not provide an interpreter.
if [[ -z "${PYTHON_EXECUTABLE:-}" ]]; then
  PYTHON_EXECUTABLE=python3
fi

# Number of processes for a parallel build (default 8 when nproc is absent,
# e.g. on macOS).
NPROC=8
if command -v nproc &> /dev/null; then NPROC=$(nproc); fi
# Configure and install the core ExecuTorch libraries — plus the extension
# modules and kernels the phi-3-mini runner links against — into ${BUILD_DIR}.
# Reads globals: PYTHON_EXECUTABLE, BUILD_DIR, BUILD_TYPE, NPROC.
cmake_install_executorch_libraries() {
  # Use ${PYTHON_EXECUTABLE} (not a hard-coded `python`) so the fallback
  # chosen at the top of the script is honored, consistent with
  # cmake_build_phi_3_mini below.
  cmake -DPYTHON_EXECUTABLE="${PYTHON_EXECUTABLE}" \
    -DCMAKE_INSTALL_PREFIX="${BUILD_DIR}" \
    -DEXECUTORCH_ENABLE_LOGGING=1 \
    -DCMAKE_BUILD_TYPE="${BUILD_TYPE}" \
    -DEXECUTORCH_BUILD_EXTENSION_DATA_LOADER=ON \
    -DEXECUTORCH_BUILD_EXTENSION_MODULE=ON \
    -DEXECUTORCH_BUILD_EXTENSION_TENSOR=ON \
    -DEXECUTORCH_BUILD_XNNPACK=ON \
    -DEXECUTORCH_BUILD_KERNELS_QUANTIZED=ON \
    -DEXECUTORCH_BUILD_KERNELS_OPTIMIZED=ON \
    -DEXECUTORCH_BUILD_KERNELS_CUSTOM=ON \
    -B"${BUILD_DIR}" .

  cmake --build "${BUILD_DIR}" -j"${NPROC}" --target install --config "${BUILD_TYPE}"
}
# Configure and build the phi-3-mini example runner against the ExecuTorch
# libraries previously installed into ${BUILD_DIR}.
# Reads globals: PYTHON_EXECUTABLE, BUILD_DIR, BUILD_TYPE, MODEL_DIR, NPROC.
cmake_build_phi_3_mini() {
  cmake -DPYTHON_EXECUTABLE="${PYTHON_EXECUTABLE}" \
    -DCMAKE_INSTALL_PREFIX="${BUILD_DIR}" \
    -DCMAKE_BUILD_TYPE="${BUILD_TYPE}" \
    -DEXECUTORCH_BUILD_KERNELS_CUSTOM=ON \
    -DEXECUTORCH_BUILD_KERNELS_OPTIMIZED=ON \
    -DEXECUTORCH_BUILD_XNNPACK=ON \
    -DEXECUTORCH_BUILD_KERNELS_QUANTIZED=ON \
    -B"${BUILD_DIR}/${MODEL_DIR}" \
    "${MODEL_DIR}"

  cmake --build "${BUILD_DIR}/${MODEL_DIR}" -j"${NPROC}" --config "${BUILD_TYPE}"
}
# Download and convert tokenizer.model
# Fetches the phi-3-mini tokenizer.model from HuggingFace and converts it to
# the tokenizer.bin format consumed by the runner. Writes both files to the
# current working directory.
prepare_tokenizer() {
  printf '%s\n' "Downloading and converting tokenizer.model"
  wget -O tokenizer.model "https://huggingface.co/microsoft/Phi-3-mini-128k-instruct/resolve/main/tokenizer.model?download=true"
  "$PYTHON_EXECUTABLE" -m executorch.extension.llm.tokenizer.tokenizer -t tokenizer.model -o tokenizer.bin
}
# Export phi-3-mini model to pte
# Runs the exporter for the 4k-context variant with sequence length 128 and
# writes phi-3-mini.pte to the current working directory.
export_phi_3_mini() {
  printf '%s\n' "Exporting phi-3-mini. This will take a few minutes"
  "$PYTHON_EXECUTABLE" -m executorch.examples.models.phi-3-mini.export_phi-3-mini -c "4k" -s 128 -o phi-3-mini.pte
}
# Run the built phi_3_mini_runner on the exported model and verify that the
# generated text contains the expected answer. Exits the script: 0 on
# success, 1 if an artifact is missing or the output does not match.
# Reads globals: BUILD_DIR, MODEL_DIR.
run_and_verify() {
  NOW=$(date +"%H:%M:%S")
  echo "Starting to run phi-3-mini runner at ${NOW}"
  if [[ ! -f "phi-3-mini.pte" ]]; then
    echo "Export failed. Abort"
    exit 1
  fi
  if [[ ! -f "tokenizer.bin" ]]; then
    echo "tokenizer.bin is missing."
    exit 1
  fi

  # Build the argument list as an array (not a flat string expanded unquoted):
  # the prompt contains spaces, and the string form word-split it into many
  # arguments with literal '"' characters embedded.
  RUNTIME_ARGS=(
    --model_path=phi-3-mini.pte
    --tokenizer_path=tokenizer.bin
    --prompt="<|system|>You are a helpful assistant.<|end|><|user|>What is the capital of France?<|end|><|assistant|>"
    --temperature=0
    --seq_len=128
  )

  "${BUILD_DIR}/${MODEL_DIR}/phi_3_mini_runner" "${RUNTIME_ARGS[@]}" > result.txt

  # verify result.txt
  RESULT=$(cat result.txt)
  EXPECTED_RESULT="The capital of France is Paris."
  if [[ "${RESULT}" == *"${EXPECTED_RESULT}"* ]]; then
    echo "Expected result prefix: ${EXPECTED_RESULT}"
    echo "Actual result: ${RESULT}"
    echo "Success"
    exit 0
  else
    echo "Expected result prefix: ${EXPECTED_RESULT}"
    echo "Actual result: ${RESULT}"
    echo "Failure; results not the same"
    exit 1
  fi
}
104
+ # Step 1. Build ExecuTorch and phi-3-mini runner
105
+ cmake_install_executorch_libraries
106
+ cmake_build_phi_3_mini
107
+
108
+ # Step 2. Export the tokenizer and model
109
+ prepare_tokenizer
110
+ export_phi_3_mini
111
+
112
+ # Step 3. Run and verify result
113
+ run_and_verify
0 commit comments