Skip to content

Commit f1e673f

Browse files
cccclai authored and facebook-github-bot committed
add mps stories end to end in ci (#4137)
Summary: Add the end-to-end test for stories fp32 in CI.

Pull Request resolved: #4137
Reviewed By: kimishpatel
Differential Revision: D59305037
Pulled By: cccclai
fbshipit-source-id: 9c132721e1120c00f4cb969b02d494879e181f32
1 parent f32d707 commit f1e673f

File tree

2 files changed

+30
-2
lines changed

2 files changed

+30
-2
lines changed

.ci/scripts/test_llama.sh

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -55,6 +55,14 @@ else
5555
QE=OFF
5656
fi
5757

58+
if [[ "${MODE}" =~ .*mps.* ]]; then
59+
MPS=ON
60+
else
61+
MPS=OFF
62+
fi
63+
64+
echo "MPS option ${MPS}"
65+
5866
if [[ -z "${BUCK:-}" ]]; then
5967
BUCK=buck2
6068
fi
@@ -77,6 +85,7 @@ cmake_install_executorch_libraries() {
7785
-DEXECUTORCH_BUILD_KERNELS_OPTIMIZED=ON \
7886
-DEXECUTORCH_BUILD_KERNELS_QUANTIZED=ON \
7987
-DEXECUTORCH_BUILD_XNNPACK="$XNNPACK" \
88+
-DEXECUTORCH_BUILD_MPS="$MPS" \
8089
-DPYTHON_EXECUTABLE="$PYTHON_EXECUTABLE" \
8190
-Bcmake-out .
8291
cmake --build cmake-out -j9 --target install --config Debug
@@ -142,6 +151,9 @@ fi
142151
if [[ "${QE}" == "ON" ]]; then
143152
EXPORT_ARGS="${EXPORT_ARGS} --embedding-quantize 8,1024"
144153
fi
154+
if [[ "${MPS}" == "ON" ]]; then
155+
EXPORT_ARGS="${EXPORT_ARGS} -kv -v --mps --disable_dynamic_shape"
156+
fi
145157
# Add dynamically linked library location
146158
$PYTHON_EXECUTABLE -m examples.models.llama2.export_llama ${EXPORT_ARGS}
147159

.github/workflows/trunk.yml

Lines changed: 18 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -225,7 +225,7 @@ jobs:
225225
matrix:
226226
dtype: [fp32]
227227
build-tool: [buck2, cmake]
228-
mode: [portable, xnnpack+kv+custom]
228+
mode: [portable, xnnpack+kv+custom, mps]
229229
fail-fast: false
230230
with:
231231
runner: macos-m1-stable
@@ -234,15 +234,31 @@ jobs:
234234
ref: ${{ github.event_name == 'pull_request' && github.event.pull_request.head.sha || github.sha }}
235235
timeout: 900
236236
script: |
237-
bash .ci/scripts/setup-conda.sh
238237
239238
DTYPE=${{ matrix.dtype }}
240239
BUILD_TOOL=${{ matrix.build-tool }}
241240
MODE=${{ matrix.mode }}
242241
242+
if [[ "${BUILD_TOOL}" == "buck2" ]]; then
243+
# TODO: Will add more modes that don't support buck2
244+
if [[ "${MODE}" == "mps" ]]; then
245+
echo "mps doesn't support buck2."
246+
exit 0
247+
fi
248+
fi
249+
250+
bash .ci/scripts/setup-conda.sh
251+
243252
# Setup executorch
244253
PYTHON_EXECUTABLE=python ${CONDA_RUN} bash .ci/scripts/setup-macos.sh "${BUILD_TOOL}"
245254
255+
if [[ "${MODE}" == "mps" ]]; then
256+
PYTHON_EXECUTABLE=python ${CONDA_RUN} bash backends/apple/mps/install_requirements.sh
257+
echo "Finishing installing mps."
258+
else
259+
echo "Not mps mode, skip installing mps."
260+
fi
261+
246262
# Install requirements for export_llama
247263
PYTHON_EXECUTABLE=python ${CONDA_RUN} bash examples/models/llama2/install_requirements.sh
248264
# Test llama2

0 commit comments

Comments (0)