
Commit 071636e

Merge branch 'pytorch:main' into pr_model_improve
2 parents d2f5f47 + 1e97232 commit 071636e

File tree: 411 files changed (+14849, -136243 lines)


.ci/docker/ci_commit_pins/pytorch.txt

Lines changed: 1 addition & 1 deletion
@@ -1 +1 @@
-7ae0ce6360b6e4f944906502d20da24c04debee5
+59d5cf083b4f860dea76fe8936076177f9367f10

.ci/scripts/setup-openvino.sh

Lines changed: 28 additions & 0 deletions
@@ -0,0 +1,28 @@
#!/bin/bash
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

set -ex

# shellcheck source=/dev/null
source "$(dirname "${BASH_SOURCE[0]}")/utils.sh"

git clone https://github.com/openvinotoolkit/openvino.git
cd openvino && git checkout releases/2025/1
git submodule update --init --recursive
sudo ./install_build_dependencies.sh
mkdir build && cd build
cmake .. -DCMAKE_BUILD_TYPE=Release -DENABLE_PYTHON=ON
make -j$(nproc)

cd ..
cmake --install build --prefix dist

source dist/setupvars.sh
cd ../backends/openvino
pip install -r requirements.txt
cd scripts
./openvino_build.sh --enable_python
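
Note: the relative "cd ../backends/openvino" step above assumes the script is run from the root of the executorch checkout, so that openvino/ and backends/openvino/ are siblings. A minimal sketch of how the two new OpenVINO scripts in this commit fit together under that assumption (the CI workflow wiring itself is not shown in this diff):

  # hypothetical invocation from the executorch repository root
  bash .ci/scripts/setup-openvino.sh   # builds OpenVINO and the ExecuTorch OpenVINO backend
  bash .ci/scripts/test_openvino.sh    # sources openvino/dist/setupvars.sh and runs the backend tests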

.ci/scripts/test_llama.sh

Lines changed: 1 addition & 1 deletion
@@ -269,7 +269,7 @@ $PYTHON_EXECUTABLE -m examples.models.llama.export_llama ${EXPORT_ARGS}
 
 # Create tokenizer.bin.
 echo "Creating tokenizer.bin"
-$PYTHON_EXECUTABLE -m extension.llm.tokenizer.tokenizer -t tokenizer.model -o tokenizer.bin
+$PYTHON_EXECUTABLE -m pytorch_tokenizers.tools.llama2c.convert -t tokenizer.model -o tokenizer.bin
 
 
 RUNTIME_ARGS="--model_path=${EXPORTED_MODEL_NAME} --tokenizer_path=tokenizer.bin --prompt=Once --temperature=0 --seq_len=10 --warmup=1"
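
The tokenizer-conversion change above recurs across several CI scripts in this commit: the in-tree extension.llm.tokenizer.tokenizer module is replaced by the pytorch_tokenizers package. A minimal sketch of the new step on its own, assuming tokenizer.model is already in the working directory and the package is installed in the active Python environment:

  python -m pytorch_tokenizers.tools.llama2c.convert -t tokenizer.model -o tokenizer.bin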

.ci/scripts/test_llama_torchao_lowbit.sh

Lines changed: 1 addition & 1 deletion
@@ -55,7 +55,7 @@ cmake --build cmake-out/examples/models/llama -j16 --config Release
 download_stories_model_artifacts
 
 echo "Creating tokenizer.bin"
-$PYTHON_EXECUTABLE -m extension.llm.tokenizer.tokenizer -t tokenizer.model -o tokenizer.bin
+$PYTHON_EXECUTABLE -m pytorch_tokenizers.tools.llama2c.convert -t tokenizer.model -o tokenizer.bin
 
 # Export model
 LLAMA_CHECKPOINT=stories110M.pt

.ci/scripts/test_openvino.sh

Lines changed: 16 additions & 0 deletions
@@ -0,0 +1,16 @@
#!/bin/bash
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

set -ex

# shellcheck source=/dev/null
source "$(dirname "${BASH_SOURCE[0]}")/utils.sh"

source openvino/dist/setupvars.sh
cd backends/openvino/tests
python test_runner.py --test_type ops
python test_runner.py --test_type models

.ci/scripts/test_phi_3_mini.sh

Lines changed: 1 addition & 1 deletion
@@ -56,7 +56,7 @@ cmake_build_phi_3_mini() {
 prepare_tokenizer() {
   echo "Downloading and converting tokenizer.model"
   wget -O tokenizer.model "https://huggingface.co/microsoft/Phi-3-mini-128k-instruct/resolve/main/tokenizer.model?download=true"
-  $PYTHON_EXECUTABLE -m executorch.extension.llm.tokenizer.tokenizer -t tokenizer.model -o tokenizer.bin
+  $PYTHON_EXECUTABLE -m pytorch_tokenizers.tools.llama2c.convert -t tokenizer.model -o tokenizer.bin
 }
 
 # Export phi-3-mini model to pte

.ci/scripts/test_qnn_static_llama.sh

Lines changed: 2 additions & 3 deletions
@@ -5,7 +5,7 @@
 # This source code is licensed under the BSD-style license found in the
 # LICENSE file in the root directory of this source tree.
 
-set -exu
+set -euxo pipefail
 
 source "$(dirname "${BASH_SOURCE[0]}")/utils.sh"
 

@@ -30,7 +30,7 @@ pip install graphviz
 # Download stories llama110m artifacts
 download_stories_model_artifacts
 echo "Creating tokenizer.bin"
-$PYTHON_EXECUTABLE -m extension.llm.tokenizer.tokenizer -t tokenizer.model -o tokenizer.bin
+$PYTHON_EXECUTABLE -m pytorch_tokenizers.tools.llama2c.convert -t tokenizer.model -o tokenizer.bin
 
 set +e
 # Compile only as weight sharing is not applicable on x86

@@ -56,4 +56,3 @@ if [ $exit_code1 -ne 0 ] || [ $exit_code2 -ne 0 ]; then
 else
   exit 0
 fi
-set -e
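
The switch from "set -exu" to "set -euxo pipefail" also makes the script abort when any command in a pipeline fails, not just the last one. A minimal standalone illustration of the difference (not part of this script):

  set -euxo pipefail
  false | true    # under pipefail the pipeline's status is 1, so -e exits here
  echo "not reached"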

.ci/scripts/utils.sh

Lines changed: 1 addition & 1 deletion
@@ -32,7 +32,7 @@ install_executorch() {
   which pip
   # Install executorch, this assumes that Executorch is checked out in the
   # current directory.
-  ./install_executorch.sh --pybind xnnpack "$@"
+  ./install_executorch.sh "$@"
   # Just print out the list of packages for debugging
   pip list
 }

.github/release.yml

Lines changed: 96 additions & 0 deletions
@@ -0,0 +1,96 @@
# .github/release.yml

changelog:
  exclude:
    labels:
      - ignore-for-release
  categories:
    - title: Breaking Changes 🛠
      labels:
        - Semver-Major
        - breaking-change
    - title: API
      labels:
        - "release notes: api"
    - title: ARM
      labels:
        - "release notes: arm"
        - "module: arm"
        - "partner: arm"
    - title: NXP
      labels:
        - "release notes: nxp"
        - "module: nxp"
    - title: Exir
      labels:
        - "release notes: exir"
        - "module: exir"
    - title: Misc
      labels:
        - "release notes: misc"
    - title: Apple
      labels:
        - "release notes: apple"
        - "module: coreml"
        - "module: mps"
    - title: Android
      labels:
        - "module: android"
    - title: IOS
      labels:
        - "module: ios"
    - title: Build
      labels:
        - "release notes: build"
    - title: Vulkan
      labels:
        - "release notes: vulkan"
        - "module: vulkan"
    - title: Cadence
      labels:
        - "release notes: cadence"
        - "module: cadence"
    - title: Runtime
      labels:
        - "release notes: runtime"
        - "module: runtime"
    - title: XNNPACK
      labels:
        - "release notes: xnnpack"
        - "module: xnnpack"
    - title: Devtools
      labels:
        - "release notes: devtools"
        - "module: devtools"
    - title: Examples
      labels:
        - "release notes: examples"
    - title: LLM
      labels:
        - "module: llm"
    - title: Mediatek
      labels:
        - "release notes: mediatek"
        - "partner: mediatek"
    - title: Openvino
      labels:
        - "release notes: openvino"
    - title: Qualcomm
      labels:
        - "release notes: qualcomm"
        - "partner: qualcomm"
        - "module: qnn"
    - title: Training
      labels:
        - "release notes: training"
        - "module: training"
    - title: Quantization
      labels:
        - "release notes: quantization"
    - title: Ops & kernels
      labels:
        - "release notes: ops & kernels"
        - "module: kernels"
    - title: Other Changes
      labels:
        - "*"

.github/scripts/extract_benchmark_results.py

Lines changed: 2 additions & 1 deletion
@@ -360,6 +360,7 @@ def transform(
         "app_type": app_type,
         # Just keep a copy of the benchmark config here
         "benchmark_config": json.dumps(benchmark_config),
+        "job_conclusion": "SUCCESS",
     },
 },
 "model": {

@@ -455,7 +456,7 @@ def transform_failure_record(
 },
 "metric": {
     "name": "FAILURE_REPORT",
-    "benchmark_values": 0,
+    "benchmark_values": [0],
     "target_value": 0,
     "extra_info": {
         "method": "",

.github/scripts/run_nm.py

Lines changed: 171 additions & 0 deletions
@@ -0,0 +1,171 @@
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

import re
import subprocess
import sys
from dataclasses import dataclass
from typing import Dict, List, Optional, Union


@dataclass
class Symbol:
    name: str
    addr: int
    size: int
    symbol_type: str


class Parser:
    def __init__(self, elf: str, toolchain_prefix: str = "", filter=None):
        self.elf = elf
        self.toolchain_prefix = toolchain_prefix
        self.symbols: Dict[str, Symbol] = self._get_nm_output()
        self.filter = filter

    @staticmethod
    def run_nm(
        elf_file_path: str, args: Optional[List[str]] = None, nm: str = "nm"
    ) -> str:
        """
        Run the nm command on the specified ELF file.
        """
        args = [] if args is None else args
        cmd = [nm] + args + [elf_file_path]
        try:
            result = subprocess.run(cmd, check=True, capture_output=True, text=True)
            return result.stdout
        except FileNotFoundError:
            print(f"Error: 'nm' command not found. Please ensure it's installed.")
            sys.exit(1)
        except subprocess.CalledProcessError as e:
            print(f"Error running nm on {elf_file_path}: {e}")
            print(f"stderr: {e.stderr}")
            sys.exit(1)

    def _get_nm_output(self) -> Dict[str, Symbol]:
        args = [
            "--print-size",
            "--size-sort",
            "--reverse-sort",
            "--demangle",
            "--format=bsd",
        ]
        output = Parser.run_nm(
            self.elf,
            args,
            nm=self.toolchain_prefix + "nm" if self.toolchain_prefix else "nm",
        )
        lines = output.splitlines()
        symbols = []
        symbol_pattern = re.compile(
            r"(?P<addr>[0-9a-fA-F]+)\s+(?P<size>[0-9a-fA-F]+)\s+(?P<type>\w)\s+(?P<name>.+)"
        )

        def parse_line(line: str) -> Optional[Symbol]:

            match = symbol_pattern.match(line)
            if match:
                addr = int(match.group("addr"), 16)
                size = int(match.group("size"), 16)
                type_ = match.group("type").strip().strip("\n")
                name = match.group("name").strip().strip("\n")
                return Symbol(name=name, addr=addr, size=size, symbol_type=type_)
            return None

        for line in lines:
            symbol = parse_line(line)
            if symbol:
                symbols.append(symbol)

        assert len(symbols) > 0, "No symbols found in nm output"
        if len(symbols) != len(lines):
            print(
                f"** Warning: Not all lines were parsed, check the output of nm. Parsed {len(symbols)} lines, given {len(lines)}"
            )
        if any(symbol.size == 0 for symbol in symbols):
            print("** Warning: Some symbols have zero size, check the output of nm.")

        # TODO: Populate the section and module fields from the linker map if available (-Wl,-Map=linker.map)
        return {symbol.name: symbol for symbol in symbols}

    def print(self):
        print(f"Elf: {self.elf}")

        def print_table(filter=None, filter_name=None):
            print("\nAddress\t\tSize\tType\tName")
            # Apply filter and sort symbols
            symbols_to_print = {
                name: sym
                for name, sym in self.symbols.items()
                if not filter or filter(sym)
            }
            sorted_symbols = sorted(
                symbols_to_print.items(), key=lambda x: x[1].size, reverse=True
            )

            # Print symbols and calculate total size
            size_total = 0
            for name, sym in sorted_symbols:
                print(f"{hex(sym.addr)}\t\t{sym.size}\t{sym.symbol_type}\t{sym.name}")
                size_total += sym.size

            # Print summary
            symbol_percent = len(symbols_to_print) / len(self.symbols) * 100
            print("-----")
            print(f"> Total bytes: {size_total}")
            print(
                f"Counted: {len(symbols_to_print)}/{len(self.symbols)}, {symbol_percent:0.2f}% (filter: '{filter_name}')"
            )
            print("=====\n")

        # Print tables with different filters
        def is_executorch_symbol(s):
            return "executorch" in s.name or s.name.startswith("et")

        FILTER_NAME_TO_FILTER_AND_LABEL = {
            "all": (None, "All"),
            "executorch": (is_executorch_symbol, "ExecuTorch"),
            "executorch_text": (
                lambda s: is_executorch_symbol(s) and s.symbol_type.lower() == "t",
                "ExecuTorch .text",
            ),
        }

        filter_func, label = FILTER_NAME_TO_FILTER_AND_LABEL.get(
            self.filter, FILTER_NAME_TO_FILTER_AND_LABEL["all"]
        )
        print_table(filter_func, label)


if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser(
        description="Process ELF file and linker map file."
    )
    parser.add_argument(
        "-e", "--elf-file-path", required=True, help="Path to the ELF file"
    )
    parser.add_argument(
        "-f",
        "--filter",
        required=False,
        default="all",
        help="Filter symbols by pre-defined filters",
        choices=["all", "executorch", "executorch_text"],
    )
    parser.add_argument(
        "-p",
        "--toolchain-prefix",
        required=False,
        default="",
        help="Optional toolchain prefix for nm",
    )

    args = parser.parse_args()
    p = Parser(args.elf_file_path, args.toolchain_prefix, filter=args.filter)
    p.print()
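
A usage sketch for this new symbol-size report script, using only the flags its argparse defines above (the ELF path below is a placeholder for whatever binary a build produces):

  # size report for all symbols in a hypothetical ExecuTorch ELF
  python .github/scripts/run_nm.py -e cmake-out/executor_runner

  # only ExecuTorch .text symbols, using a cross toolchain's nm (arm-none-eabi-nm)
  python .github/scripts/run_nm.py -e cmake-out/executor_runner -f executorch_text -p arm-none-eabi-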
