4 files changed, +25 −10 lines changed

@@ -191,6 +191,11 @@ test_model_with_coreml() {
   EXPORTED_MODEL=$(find "." -type f -name "${MODEL_NAME}*.pte" -print -quit)
 }
 
+test_model_with_mps() {
+  "${PYTHON_EXECUTABLE}" -m examples.apple.mps.scripts.mps_example --model_name="${MODEL_NAME}" --use_fp16
+  EXPORTED_MODEL=$(find "." -type f -name "${MODEL_NAME}*.pte" -print -quit)
+}
+
 if [[ "${BACKEND}" == "portable" ]]; then
   echo "Testing ${MODEL_NAME} with portable kernels..."
   test_model
@@ -206,6 +211,12 @@ elif [[ "${BACKEND}" == "coreml" ]]; then
   if [[ $? -eq 0 ]]; then
     prepare_artifacts_upload
   fi
+elif [[ "${BACKEND}" == "mps" ]]; then
+  echo "Testing ${MODEL_NAME} with mps..."
+  test_model_with_mps
+  if [[ $? -eq 0 ]]; then
+    prepare_artifacts_upload
+  fi
 elif [[ "${BACKEND}" == "xnnpack" ]]; then
   echo "Testing ${MODEL_NAME} with xnnpack..."
   WITH_QUANTIZATION=true
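
A minimal sketch of how the new mps branch could be exercised locally; "mv3" and "cmake" are illustrative values, the argument order matches how the workflows below call the script:

# Exercise the new mps branch of .ci/scripts/test_model.sh (illustrative values).
PYTHON_EXECUTABLE=python bash .ci/scripts/test_model.sh "mv3" "cmake" "mps"
# On success the script exports the model via examples.apple.mps.scripts.mps_example
# (with --use_fp16) and picks up the resulting "mv3*.pte" file as EXPORTED_MODEL.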
@@ -169,6 +169,8 @@ jobs:
             DELEGATE_CONFIG="xnnpack+custom+qe"
           elif [[ ${{ matrix.delegate }} == "coreml" ]]; then
             DELEGATE_CONFIG="coreml"
+          elif [[ ${{ matrix.delegate }} == "mps" ]]; then
+            DELEGATE_CONFIG="mps"
           fi
           PYTHON_EXECUTABLE=python ${CONDA_RUN} --no-capture-output \
             bash .ci/scripts/test_llama.sh "${{ matrix.model }}" "${BUILD_MODE}" "${DTYPE}" "${DELEGATE_CONFIG}" "${ARTIFACTS_DIR_NAME}"
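
With the new branch, a matrix entry whose delegate is mps resolves to DELEGATE_CONFIG="mps" and ends up in a test_llama.sh call along these lines; the model, build mode, dtype, and artifacts directory below are placeholders, the real values come from the workflow matrix and environment:

# Illustrative only: concrete values are supplied by the workflow, not hard-coded.
MODEL="stories110M"                           # hypothetical matrix.model value
BUILD_MODE="cmake"                            # hypothetical
DTYPE="fp32"                                  # hypothetical
DELEGATE_CONFIG="mps"                         # resolved by the new elif branch above
ARTIFACTS_DIR_NAME="artifacts-to-be-uploaded" # hypothetical
PYTHON_EXECUTABLE=python bash .ci/scripts/test_llama.sh \
  "${MODEL}" "${BUILD_MODE}" "${DTYPE}" "${DELEGATE_CONFIG}" "${ARTIFACTS_DIR_NAME}"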
@@ -312,7 +312,7 @@ jobs:
         PYTHON_EXECUTABLE=python bash .ci/scripts/test_model.sh ${{ matrix.model }} "cmake" "qnn"
 
   test-coreml-model:
-    name: test-coreml-model
+    name: test-coreml-and-mps-model
     uses: pytorch/test-infra/.github/workflows/macos_job.yml@main
     strategy:
       fail-fast: false
@@ -324,20 +324,25 @@ jobs:
       timeout: 90
      script: |
         BUILD_TOOL=cmake
-        BACKEND=coreml
 
         bash .ci/scripts/setup-conda.sh
 
         # Setup MacOS dependencies as there is no Docker support on MacOS atm
         PYTHON_EXECUTABLE=python ${CONDA_RUN} bash .ci/scripts/setup-macos.sh "${BUILD_TOOL}"
         PYTHON_EXECUTABLE=python ${CONDA_RUN} bash backends/apple/coreml/scripts/install_requirements.sh
         echo "Finishing installing coreml."
+        PYTHON_EXECUTABLE=python ${CONDA_RUN} bash backends/apple/mps/install_requirements.sh
+        echo "Finishing installing mps."
 
         # Build and test coreml model
-        MODELS=(mv3 ic4 resnet50 edsr mobilebert w2l)
+        MODELS=(mv3 ic4 resnet50 edsr mobilebert w2l vit)
         for MODEL_NAME in "${MODELS[@]}"; do
           echo "::group::Exporting coreml model: $MODEL_NAME"
-          PYTHON_EXECUTABLE=python ${CONDA_RUN} bash .ci/scripts/test_model.sh "${MODEL_NAME}" "${BUILD_TOOL}" "${BACKEND}"
+          PYTHON_EXECUTABLE=python ${CONDA_RUN} bash .ci/scripts/test_model.sh "${MODEL_NAME}" "${BUILD_TOOL}" "coreml"
+          echo "::endgroup::"
+
+          echo "::group::Exporting mps model: $MODEL_NAME"
+          PYTHON_EXECUTABLE=python ${CONDA_RUN} bash .ci/scripts/test_model.sh "${MODEL_NAME}" "${BUILD_TOOL}" "mps"
           echo "::endgroup::"
         done
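
A rough local equivalent of the updated job body for a single model, assuming a macOS machine where the repo's usual conda/macOS setup scripts have already been run; it mirrors the commands the job executes (without ${CONDA_RUN}):

# Local reproduction sketch of the coreml + mps export loop for one model.
BUILD_TOOL=cmake
MODEL_NAME=vit   # the newly added entry; any model in the MODELS list works

PYTHON_EXECUTABLE=python bash backends/apple/coreml/scripts/install_requirements.sh
PYTHON_EXECUTABLE=python bash backends/apple/mps/install_requirements.sh

# Each model is now exported twice, once per Apple backend.
PYTHON_EXECUTABLE=python bash .ci/scripts/test_model.sh "${MODEL_NAME}" "${BUILD_TOOL}" "coreml"
PYTHON_EXECUTABLE=python bash .ci/scripts/test_model.sh "${MODEL_NAME}" "${BUILD_TOOL}" "mps"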
@@ -195,18 +195,15 @@ def get_model_config(args):
         edge_compile_config=exir.EdgeCompileConfig(_check_ir_validity=False),
     ).to_executorch(config=ExecutorchBackendConfig(extract_delegate_segments=False))
 
-    model_name = f"{args.model_name}_mps"
+    dtype = "fp16" if args.use_fp16 else "fp32"
+    model_name = f"{args.model_name}_mps_{dtype}"
 
     if args.bundled:
         expected_output = model(*example_inputs)
         bundled_program_buffer = get_bundled_program(
             executorch_program, example_inputs, expected_output
         )
-        model_name = f"{model_name}_bundled"
-        extension = "fp16"
-        if not args.use_fp16:
-            extension = "fp32"
-        model_name = f"{model_name}_{extension}.pte"
+        model_name = f"{model_name}_bundled.pte"
 
     if args.generate_etrecord:
         etrecord_path = "etrecord.bin"
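
Under the revised naming, the exported artifact encodes the dtype, which is what lets the CI glob for "${MODEL_NAME}*.pte" pick it up. A hedged sketch of the resulting names follows; the assumption that ".pte" is appended for the non-bundled path by the script's existing save logic, and the exact name of the flag behind args.bundled, are inferred from the surrounding code rather than stated in the diff:

# Expected artifact names under the new scheme (sketch, see assumptions above).
python -m examples.apple.mps.scripts.mps_example --model_name=mv3 --use_fp16
#   -> mv3_mps_fp16.pte

python -m examples.apple.mps.scripts.mps_example --model_name=mv3
#   -> mv3_mps_fp32.pte

# Hypothetical: if a --bundled flag maps to args.bundled, the bundled artifact
# would be written as mv3_mps_fp16_bundled.pte.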