Skip to content

Commit eca5d9f

Browse files
authored
Add buck file for static llama
Differential Revision: D67057242 Pull Request resolved: #7276
1 parent f28e9a5 commit eca5d9f

File tree

7 files changed

+96
-7
lines changed

7 files changed

+96
-7
lines changed

examples/qualcomm/TARGETS

Lines changed: 12 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,12 @@
11
# Any targets that should be shared between fbcode and xplat must be defined in
22
# targets.bzl. This file can contain fbcode-only targets.
33

4-
load("@fbsource//xplat/executorch/build:runtime_wrapper.bzl", "runtime")
4+
load("@fbcode_macros//build_defs:python_library.bzl", "python_library")
5+
load("@fbcode_macros//build_defs:python_binary.bzl", "python_binary")
56

67
oncall("executorch")
78

8-
runtime.python_binary(
9+
python_binary(
910
name = "export_example",
1011
srcs = ["scripts/export_example.py"],
1112
main_function = ".scripts.export_example.main",
@@ -20,3 +21,12 @@ runtime.python_binary(
2021
"//executorch/extension/export_util:export_util",
2122
],
2223
)
24+
25+
python_library(
26+
name = "utils",
27+
srcs = ["utils.py"],
28+
deps = [
29+
"//executorch/backends/qualcomm/partition:partition",
30+
"//executorch/backends/qualcomm/quantizer:quantizer",
31+
],
32+
)
Lines changed: 43 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,43 @@
1+
load("@fbcode_macros//build_defs:python_library.bzl", "python_library")
2+
load("@fbsource//xplat/executorch/backends/qualcomm/qnn_version.bzl", "get_qnn_library_verision")
3+
load("@fbcode_macros//build_defs:python_binary.bzl", "python_binary")
4+
load("@fbsource//xplat/executorch/build:runtime_wrapper.bzl", "runtime")
5+
6+
oncall("executorch")
7+
8+
9+
python_library(
10+
name = "static_llama",
11+
srcs = [
12+
"model/static_llama.py",
13+
],
14+
deps = [
15+
"//caffe2:torch",
16+
],
17+
)
18+
19+
python_binary(
20+
name = "llama",
21+
srcs = ["llama.py"],
22+
main_function = "executorch.examples.qualcomm.oss_scripts.llama2.llama.main",
23+
deps = [
24+
":static_llama",
25+
"//caffe2:torch",
26+
"//executorch/extension/pybindings:aten_lib",
27+
"//executorch/backends/qualcomm/partition:partition",
28+
"//executorch/backends/qualcomm/quantizer:quantizer",
29+
"//executorch/devtools:lib",
30+
"//executorch/examples/models:models",
31+
"//executorch/examples/qualcomm:utils",
32+
"//executorch/extension/export_util:export_util",
33+
"//executorch/extension/llm/export:export_lib",
34+
],
35+
)
36+
37+
runtime.command_alias(
38+
name = "llama_qnn",
39+
env = {
40+
"LD_LIBRARY_PATH": "$(location fbsource//third-party/qualcomm/qnn/qnn-{0}:qnn_offline_compile_libs)".format(get_qnn_library_verision()),
41+
},
42+
exe = ":llama",
43+
)

examples/qualcomm/oss_scripts/llama2/llama.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,9 @@
44
# This source code is licensed under the BSD-style license found in the
55
# LICENSE file in the root directory of this source tree.
66

7+
# TODO: reenable pyre after fixing the issues
8+
# pyre-ignore-all-errors
9+
710
import codecs
811
import getpass
912
import json

examples/qualcomm/oss_scripts/llama2/model/static_llama.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,9 @@
44
# This source code is licensed under the BSD-style license found in the
55
# LICENSE file in the root directory of this source tree.
66

7+
# TODO: reenable pyre after fixing the issues
8+
# pyre-ignore-all-errors
9+
710
from typing import List, Optional, Tuple
811

912
import torch
Lines changed: 29 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,32 @@
1-
# Any targets that should be shared between fbcode and xplat must be defined in
2-
# targets.bzl. This file can contain xplat-only targets.
3-
4-
load(":targets.bzl", "define_common_targets")
1+
load("@fbcode_macros//build_defs:python_library.bzl", "python_library")
2+
load("@fbsource//xplat/executorch/backends/qualcomm/qnn_version.bzl", "get_qnn_library_verision")
3+
load("@fbcode_macros//build_defs:python_binary.bzl", "python_binary")
4+
load("@fbsource//xplat/executorch/build:runtime_wrapper.bzl", "runtime")
55

66
oncall("executorch")
77

8-
define_common_targets()
8+
python_binary(
9+
name = "llama",
10+
srcs = ["llama.py"],
11+
main_function = "executorch.examples.qualcomm.oss_scripts.llama3_2.llama.main",
12+
deps = [
13+
"//executorch/examples/qualcomm/oss_scripts/llama2:static_llama",
14+
"//caffe2:torch",
15+
"//executorch/extension/pybindings:aten_lib",
16+
"//executorch/backends/qualcomm/partition:partition",
17+
"//executorch/backends/qualcomm/quantizer:quantizer",
18+
"//executorch/devtools:lib",
19+
"//executorch/examples/models:models",
20+
"//executorch/examples/qualcomm:utils",
21+
"//executorch/extension/export_util:export_util",
22+
"//executorch/extension/llm/export:export_lib",
23+
],
24+
)
25+
26+
runtime.command_alias(
27+
name = "llama_qnn",
28+
env = {
29+
"LD_LIBRARY_PATH": "$(location fbsource//third-party/qualcomm/qnn/qnn-{0}:qnn_offline_compile_libs)".format(get_qnn_library_verision()),
30+
},
31+
exe = ":llama",
32+
)

examples/qualcomm/oss_scripts/llama3_2/llama.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,9 @@
44
# This source code is licensed under the BSD-style license found in the
55
# LICENSE file in the root directory of this source tree.
66

7+
# TODO: reenable pyre after fixing the issues
8+
# pyre-ignore-all-errors
9+
710
import getpass
811
import json
912
import logging

examples/qualcomm/utils.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,9 @@
44
# This source code is licensed under the BSD-style license found in the
55
# LICENSE file in the root directory of this source tree.
66

7+
# TODO: reenable pyre after fixing the issues
8+
# pyre-ignore-all-errors
9+
710
import argparse
811
import os
912
import subprocess

0 commit comments

Comments (0)