Skip to content

Commit c8f4171

Browse files
committed
Update on "[executorch][serialization] Refactor flatbuffer utils into separate file"
For usage in extension/flat_tensor/serialize. Differential Revision: [D66854756](https://our.internmc.facebook.com/intern/diff/D66854756/) [ghstack-poisoned]
2 parents 9802253 + f74afd2 commit c8f4171

File tree

359 files changed

+7034
-1905
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

359 files changed

+7034
-1905
lines changed

.ci/docker/ci_commit_pins/pytorch.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
19eff28ff3f19b50da46f5a9ff5f4d4d213806fe
1+
2ea4b56ec872424e486c4fe2d55da061067a2ed3

.ci/docker/common/install_pytorch.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@ install_pytorch_and_domains() {
2626

2727
chown -R ci-user .
2828

29-
export _GLIBCXX_USE_CXX11_ABI=0
29+
export _GLIBCXX_USE_CXX11_ABI=1
3030
# Then build and install PyTorch
3131
conda_run python setup.py bdist_wheel
3232
pip_install "$(echo dist/*.whl)"

.ci/docker/ubuntu/Dockerfile

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -79,9 +79,6 @@ RUN if [ -n "${ANDROID_NDK_VERSION}" ]; then bash ./install_android.sh; fi
7979
RUN rm install_android.sh
8080

8181
ARG ARM_SDK
82-
COPY --chown=ci-user:ci-user ./arm /opt/arm
83-
# Set up ARM SDK if needed
84-
RUN if [ -n "${ARM_SDK}" ]; then git config --global user.email "[email protected]"; git config --global user.name "OSS CI"; bash /opt/arm/setup.sh --i-agree-to-the-contained-eula /opt/arm-sdk; chown -R ci-user:ci-user /opt/arm-sdk; fi
8582

8683
ARG QNN_SDK
8784

.ci/scripts/build_llama_android.sh

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,8 @@ source "$(dirname "${BASH_SOURCE[0]}")/utils.sh"
1212

1313
install_executorch_and_backend_lib() {
1414
echo "Installing executorch and xnnpack backend"
15-
rm -rf cmake-android-out && mkdir cmake-android-out
15+
clean_executorch_install_folders
16+
mkdir cmake-android-out
1617
ANDROID_NDK=/opt/ndk
1718
BUCK2=buck2
1819
ANDROID_ABI=arm64-v8a

.ci/scripts/download_hf_hub.sh

Lines changed: 95 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,95 @@
1+
#!/bin/bash

# Function to download files from the Hugging Face Hub
# Arguments:
#   1. model_id: The Hugging Face repository ID (e.g., "organization/model_name")
#   2. subdir: The optional subdirectory in the repo to look for files (pass "" if not used)
#   3+. file_names: One or more filenames to be downloaded
# Outputs:
#   Prints the directory containing the downloaded files to stdout
# Returns:
#   0 on success, 1 if any download fails
function download_hf_files() {
  local model_id="$1"
  local subdir="$2"
  shift 2
  local file_names=("$@") # Capture all remaining arguments as an array

  local download_dir

  # Use the first file to determine the download directory:
  # hf_hub_download places files from one repo revision under a single
  # snapshot directory, so dirname of the first file locates all of them.
  # NOTE: checking the command directly instead of the `$? -ne 0` anti-pattern.
  if ! download_dir=$(python3 -c "
from huggingface_hub import hf_hub_download
# Download the first file and get its directory
path = hf_hub_download(
    repo_id='${model_id}',
    filename='${subdir:+${subdir}/}${file_names[0]}'
)
import os
print(os.path.dirname(path))"); then
    echo "Error: Failed to determine download directory from ${file_names[0]}" >&2
    return 1
  fi

  # Download remaining files into the same directory
  local file_name
  for file_name in "${file_names[@]:1}"; do
    if ! python3 -c "
from huggingface_hub import hf_hub_download
# Download the file
hf_hub_download(
    repo_id='${model_id}',
    filename='${subdir:+${subdir}/}${file_name}'
)"; then
      echo "Error: Failed to download ${file_name} from ${model_id}" >&2
      return 1
    fi
  done

  # Return the directory containing the downloaded files
  echo "$download_dir"
}

# Run the CLI only when executed directly; the file may also be sourced
# just for the download_hf_files function.
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
  # Initialize so the script is safe under `set -u` and so the array-length
  # check below cannot hit an unset array (errors in bash < 4.4).
  MODEL_ID=""
  SUBDIR=""
  FILES_TO_DOWNLOAD=()

  # Parse arguments from CLI
  while [[ $# -gt 0 ]]; do
    case "$1" in
      --model_id)
        MODEL_ID="$2"
        shift 2
        ;;
      --subdir)
        SUBDIR="$2"
        shift 2
        ;;
      --files)
        shift
        FILES_TO_DOWNLOAD=()
        # Collect filenames until the next --option or end of arguments
        while [[ $# -gt 0 && $1 != --* ]]; do
          FILES_TO_DOWNLOAD+=("$1")
          shift
        done
        ;;
      *)
        echo "Unknown option: $1" >&2
        exit 1
        ;;
    esac
  done

  # Validate required arguments
  if [[ -z "$MODEL_ID" || ${#FILES_TO_DOWNLOAD[@]} -eq 0 ]]; then
    echo "Usage: $0 --model_id <model_id> --subdir <subdir> --files <file1> [<file2> ...]" >&2
    exit 1
  fi

  # Call the function and forward its stdout (the download directory);
  # propagate failure as a non-zero exit code.
  if DOWNLOAD_DIR=$(download_hf_files "$MODEL_ID" "$SUBDIR" "${FILES_TO_DOWNLOAD[@]}"); then
    echo "$DOWNLOAD_DIR"
  else
    exit 1
  fi
fi
Lines changed: 224 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,224 @@
1+
#!/usr/bin/env python
2+
# Copyright (c) Meta Platforms, Inc. and affiliates.
3+
# All rights reserved.
4+
#
5+
# This source code is licensed under the BSD-style license found in the
6+
# LICENSE file in the root directory of this source tree.
7+
8+
import json
9+
import logging
10+
import os
11+
import re
12+
from typing import Any, Dict
13+
14+
from examples.models import MODEL_NAME_TO_MODEL
15+
16+
17+
# Device pools for AWS Device Farm
# Maps a short human-readable device name (as accepted by --devices) to the
# AWS Device Farm device-pool ARN that benchmark jobs are scheduled onto.
# The "+ios_18" suffix denotes a pool pinned to iOS 18 devices.
DEVICE_POOLS = {
    "apple_iphone_15": "arn:aws:devicefarm:us-west-2:308535385114:devicepool:02a2cf0f-6d9b-45ee-ba1a-a086587469e6/3b5acd2e-92e2-4778-b651-7726bafe129d",
    "apple_iphone_15+ios_18": "arn:aws:devicefarm:us-west-2:308535385114:devicepool:02a2cf0f-6d9b-45ee-ba1a-a086587469e6/12c8b15c-8d03-4e07-950d-0a627e7595b4",
    "samsung_galaxy_s22": "arn:aws:devicefarm:us-west-2:308535385114:devicepool:02a2cf0f-6d9b-45ee-ba1a-a086587469e6/e59f866a-30aa-4aa1-87b7-4510e5820dfa",
    "samsung_galaxy_s24": "arn:aws:devicefarm:us-west-2:308535385114:devicepool:02a2cf0f-6d9b-45ee-ba1a-a086587469e6/98f8788c-2e25-4a3c-8bb2-0d1e8897c0db",
    "google_pixel_8_pro": "arn:aws:devicefarm:us-west-2:308535385114:devicepool:02a2cf0f-6d9b-45ee-ba1a-a086587469e6/d65096ab-900b-4521-be8b-a3619b69236a",
}

# Predefined benchmark configurations
# "xplat" lists configs usable on either OS; "android"/"ios" list the
# OS-specific extras that get_benchmark_configs() merges in per target OS.
BENCHMARK_CONFIGS = {
    "xplat": [
        "xnnpack_q8",
        "hf_xnnpack_fp32",
        "llama3_fb16",
        "llama3_spinquant",
        "llama3_qlora",
    ],
    "android": [
        "qnn_q8",
        # TODO: Add support for llama3 htp
        # "llama3_qnn_htp",
    ],
    "ios": [
        "coreml_fp16",
        "mps",
        "llama3_coreml_ane",
    ],
}
46+
47+
48+
def parse_args() -> Any:
    """
    Parse command-line arguments for gathering benchmark configs.

    Returns:
        argparse.Namespace: Parsed command-line arguments with the fields
        ``os`` (target OS), ``models`` (list of model names/IDs), and
        ``devices`` (list of device-pool names).

    Example:
        parse_args() -> Namespace(models=['mv3', 'meta-llama/Llama-3.2-1B-Instruct-QLORA_INT4_EO8'],
                                  os='android',
                                  devices=['samsung_galaxy_s22'])
    """
    import argparse

    def split_csv(raw: str):
        """Split a comma-separated CLI value into a list of items."""
        return raw.split(",")

    parser = argparse.ArgumentParser("Gather all benchmark configs.")
    parser.add_argument(
        "--os",
        type=str,
        choices=["android", "ios"],
        help="The target OS.",
    )
    parser.add_argument(
        "--models",
        type=split_csv,  # comma-separated values become a list
        help=f"Comma-separated model IDs or names. Valid values include {MODEL_NAME_TO_MODEL}.",
    )
    parser.add_argument(
        "--devices",
        type=split_csv,  # comma-separated values become a list
        help=f"Comma-separated device names. Available devices: {list(DEVICE_POOLS.keys())}",
    )

    return parser.parse_args()
87+
88+
89+
def set_output(name: str, val: Any) -> None:
    """
    Set the output value to be used by other GitHub jobs.

    Appends ``name=val`` to the file named by the GITHUB_OUTPUT environment
    variable when it is set; otherwise falls back to the legacy
    ``::set-output`` workflow command on stdout.

    Args:
        name (str): The name of the output variable.
        val (Any): The value to set for the output variable.

    Example:
        set_output("benchmark_configs", {"include": [...]})
    """
    github_output = os.getenv("GITHUB_OUTPUT")
    if not github_output:
        # Legacy fallback for runners that do not provide GITHUB_OUTPUT.
        print(f"::set-output name={name}::{val}")
        return

    print(f"Setting {val} to GitHub output")
    with open(str(github_output), "a") as env:
        print(f"{name}={val}", file=env)
107+
108+
109+
def is_valid_huggingface_model_id(model_name: str) -> bool:
    """
    Validate if the model name matches the pattern for HuggingFace model IDs.

    A valid ID has the form ``namespace/repo`` where the namespace allows
    alphanumerics, ``-`` and ``_``, and the repo additionally allows ``.``.

    Args:
        model_name (str): The model name to validate.

    Returns:
        bool: True if the model name matches the valid pattern, False otherwise.

    Example:
        is_valid_huggingface_model_id('meta-llama/Llama-3.2-1B') -> True
    """
    # Anchored re.match (not fullmatch) kept on purpose to preserve the
    # original behavior exactly.
    hf_id_pattern = r"^[a-zA-Z0-9-_]+/[a-zA-Z0-9-_.]+$"
    return re.match(hf_id_pattern, model_name) is not None
124+
125+
126+
def get_benchmark_configs() -> Dict[str, Dict]:
    """
    Gather benchmark configurations for a given set of models on the target
    operating system and devices.

    Reads --os, --models, and --devices from the command line (see
    parse_args), resolves each model to a list of benchmark configs, and
    emits the resulting matrix via set_output("benchmark_configs", ...).

    Returns:
        Dict[str, Dict]: A dictionary containing the benchmark configurations.

    Example:
        get_benchmark_configs() -> {
            "include": [
                {
                    "model": "meta-llama/Llama-3.2-1B",
                    "config": "llama3_qlora",
                    "device_name": "apple_iphone_15",
                    "device_arn": "arn:aws:..."
                },
                {
                    "model": "mv3",
                    "config": "xnnpack_q8",
                    "device_name": "samsung_galaxy_s22",
                    "device_arn": "arn:aws:..."
                },
                ...
            ]
        }
    """
    args = parse_args()
    target_os = args.os
    devices = args.devices
    models = args.models

    benchmark_configs = {"include": []}

    for model_name in models:
        configs = []
        if is_valid_huggingface_model_id(model_name):
            if model_name.startswith("meta-llama/"):
                # LLaMA models: pick the base config from the repo name, then
                # add the OS-specific llama configs.
                repo_name = model_name.split("meta-llama/")[1]
                if "qlora" in repo_name.lower():
                    configs.append("llama3_qlora")
                elif "spinquant" in repo_name.lower():
                    configs.append("llama3_spinquant")
                else:
                    configs.append("llama3_fb16")
                configs.extend(
                    [
                        config
                        for config in BENCHMARK_CONFIGS.get(target_os, [])
                        if config.startswith("llama")
                    ]
                )
            else:
                # Non-LLaMA HuggingFace models
                configs.append("hf_xnnpack_fp32")
        elif model_name in MODEL_NAME_TO_MODEL:
            # ExecuTorch in-tree non-GenAI models: xnnpack plus the
            # OS-specific non-llama configs.
            configs.append("xnnpack_q8")
            configs.extend(
                [
                    config
                    for config in BENCHMARK_CONFIGS.get(target_os, [])
                    if not config.startswith("llama")
                ]
            )
        else:
            # Skip unknown models with a warning
            logging.warning(f"Unknown or invalid model name '{model_name}'. Skipping.")
            continue

        # Add configurations for each valid device
        for device in devices:
            for config in configs:
                # Work on a per-config copy of the device name. The original
                # code reassigned the loop variable `device`, so once one
                # config was auto-upgraded to "+ios_18" every later config
                # for that device silently ran on the upgraded pool too.
                target_device = device
                if config == "llama3_coreml_ane" and not target_device.endswith(
                    "+ios_18"
                ):
                    target_device = f"{target_device}+ios_18"
                    logging.info(
                        f"Benchmark config '{config}' only works on iOS 18+, auto-upgraded device pool to '{target_device}'"
                    )

                if target_device not in DEVICE_POOLS:
                    logging.warning(f"Unsupported device '{target_device}'. Skipping.")
                    continue

                record = {
                    "model": model_name,
                    "config": config,
                    "device_name": target_device,
                    "device_arn": DEVICE_POOLS[target_device],
                }
                benchmark_configs["include"].append(record)

    set_output("benchmark_configs", json.dumps(benchmark_configs))
    # Return the matrix as well, matching the declared return annotation
    # (previously the function implicitly returned None).
    return benchmark_configs
221+
222+
223+
if __name__ == "__main__":
    # Script entry point: build the benchmark matrix from CLI args and
    # publish it as a GitHub Actions output.
    get_benchmark_configs()

.ci/scripts/setup-vulkan-linux-deps.sh

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ install_swiftshader() {
2727

2828
install_vulkan_sdk() {
2929
VULKAN_SDK_VERSION=$1
30-
_vulkan_sdk_url="https://sdk.lunarg.com/sdk/download/${VULKAN_SDK_VERSION}/linux/vulkansdk-linux-x86_64-${VULKAN_SDK_VERSION}.tar.gz"
30+
_vulkan_sdk_url="https://sdk.lunarg.com/sdk/download/${VULKAN_SDK_VERSION}/linux/vulkansdk-linux-x86_64-${VULKAN_SDK_VERSION}.tar.xz"
3131

3232
_vulkan_sdk_dir=/tmp/vulkansdk
3333
mkdir -p $_vulkan_sdk_dir
@@ -37,12 +37,12 @@ install_vulkan_sdk() {
3737
curl --silent --show-error --location --fail --retry 3 \
3838
--output "${_tmp_archive}" "${_vulkan_sdk_url}"
3939

40-
tar -C "${_vulkan_sdk_dir}" -xzf "${_tmp_archive}"
40+
tar -C "${_vulkan_sdk_dir}" -xJf "${_tmp_archive}"
4141

4242
export PATH="${PATH}:${_vulkan_sdk_dir}/${VULKAN_SDK_VERSION}/x86_64/bin/"
4343
}
4444

45-
VULKAN_SDK_VERSION="1.2.198.1"
45+
VULKAN_SDK_VERSION="1.3.296.0"
4646

4747
install_swiftshader
4848
install_vulkan_sdk "${VULKAN_SDK_VERSION}"

.ci/scripts/test_llama.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -208,7 +208,7 @@ prepare_artifacts_upload() {
208208
PARAMS="params.json"
209209
CHECKPOINT_FILE_NAME=""
210210
touch "${PARAMS}"
211-
if [[ "${MODEL_NAME}" == "stories110M" ]]; then
211+
if [[ "${MODEL_NAME}" == "llama" ]] || [[ "${MODEL_NAME}" == "stories"* ]] || [[ "${MODEL_NAME}" == "tinyllama" ]]; then
212212
CHECKPOINT_FILE_NAME="stories110M.pt"
213213
download_stories_model_artifacts
214214
else

.ci/scripts/test_llava.sh

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -41,7 +41,6 @@ EXECUTORCH_COMMON_CMAKE_ARGS=" \
4141
-DEXECUTORCH_BUILD_KERNELS_OPTIMIZED=ON \
4242
-DEXECUTORCH_BUILD_KERNELS_QUANTIZED=ON \
4343
-DEXECUTORCH_BUILD_XNNPACK=ON \
44-
-DEXECUTORCH_DO_NOT_USE_CXX11_ABI=ON \
4544
-DEXECUTORCH_XNNPACK_SHARED_WORKSPACE=ON"
4645

4746
cmake_install_executorch_libraries() {

0 commit comments

Comments (0)