
Commit 8209bc1

cccclai authored and facebook-github-bot committed
Optionally add qnn backend to llama runner buck file (#6355)
Summary: Pull Request resolved: #6355 Include the QNN backend as part of the llama runner dependencies, controlled by a build flag that defaults to false. Reviewed By: kirklandsign Differential Revision: D64334713 fbshipit-source-id: 8f8f3416bd042f340eed4d50ed4a192de148d91d
1 parent 339bb74 commit 8209bc1

File tree

1 file changed: +12 -3 lines changed

examples/models/llama/runner/targets.bzl

Lines changed: 12 additions & 3 deletions
@@ -9,10 +9,20 @@ def _get_operator_lib(aten = False):
     else:
         return ["//executorch/configurations:optimized_native_cpu_ops", "//executorch/extension/llm/custom_ops:custom_ops"]
 
+def get_qnn_dependency():
+    # buck build -c executorch.enable_qnn=true //executorch/examples/models/llama/runner:runner
+    # Check if QNN is enabled before including the dependency
+    if native.read_config("executorch", "enable_qnn", "false") == "true":
+        # //executorch/backends/qualcomm:qnn_executorch_backend doesn't work,
+        # likely because it's an empty library with dependencies only
+        return [
+            "//executorch/backends/qualcomm/runtime:runtime",
+        ]
+    return []
+
 def define_common_targets():
     for aten in (True, False):
         aten_suffix = "_aten" if aten else ""
-
         runtime.cxx_library(
             name = "runner" + aten_suffix,
             srcs = [
@@ -27,7 +37,6 @@ def define_common_targets():
             visibility = [
                 "@EXECUTORCH_CLIENTS",
             ],
-            # qnn_executorch_backend can be added below //executorch/backends/qualcomm:qnn_executorch_backend
             exported_deps = [
                 "//executorch/backends/xnnpack:xnnpack_backend",
                 "//executorch/extension/llm/runner:stats",
@@ -46,7 +55,7 @@ def define_common_targets():
                 # Vulkan API currently cannot build on some platforms (e.g. Apple, FBCODE)
                 # Therefore enable it explicitly for now to avoid failing tests
                 "//executorch/backends/vulkan:vulkan_backend_lib",
-            ] if native.read_config("llama", "use_vulkan", "0") == "1" else []),
+            ] if native.read_config("llama", "use_vulkan", "0") == "1" else []) + get_qnn_dependency(),
            external_deps = [
                "libtorch",
            ] if aten else [],
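For context, a short usage sketch. The first command is taken verbatim from the comment in the diff; the second is an inference from the flag's "false" default, under which get_qnn_dependency() returns an empty list:

    # Build the llama runner with the QNN backend dependency included
    # (the flag introduced here defaults to false, so it must be set explicitly)
    buck build -c executorch.enable_qnn=true //executorch/examples/models/llama/runner:runner

    # A plain build omits the QNN dependency, since executorch.enable_qnn
    # is unset and get_qnn_dependency() falls through to return []
    buck build //executorch/examples/models/llama/runner:runner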
