File tree Expand file tree Collapse file tree 3 files changed +41
-1
lines changed
examples/models/llama/config Expand file tree Collapse file tree 3 files changed +41
-1
lines changed Original file line number Diff line number Diff line change
# Any targets that should be shared between fbcode and xplat must be defined in
# targets.bzl. This file can contain fbcode-only targets.

load("@fbsource//xplat/executorch/build:runtime_wrapper.bzl", "runtime")
load(":targets.bzl", "define_common_targets")

oncall("executorch")

# All buildable targets for this directory live in targets.bzl so they can be
# shared with xplat; this file only instantiates them for fbcode.
define_common_targets()
Original file line number Diff line number Diff line change @@ -218,7 +218,7 @@ class CoreMLConfig:
218
218
enable_state : bool = False
219
219
preserve_sdpa : bool = False
220
220
quantize : Optional [CoreMLQuantize ] = None
221
- ios : Literal [ 15 , 16 , 17 , 18 ] = 15
221
+ ios : int = 15
222
222
compute_units : CoreMLComputeUnit = CoreMLComputeUnit .CPU_ONLY
223
223
224
224
def __post_init__ (self ):
Original file line number Diff line number Diff line change
load("@fbsource//xplat/executorch/build:runtime_wrapper.bzl", "runtime")

def define_common_targets():
    """Defines targets that should be shared between fbcode and xplat.

    The directory containing this targets.bzl file should also contain both
    TARGETS and BUCK files that call this function.
    """

    # Config dataclasses for the llama example, visible to all of
    # //executorch and to external clients.
    runtime.python_library(
        name = "llm_config",
        srcs = [
            "llm_config.py",
        ],
        _is_external_target = True,
        base_module = "executorch.examples.models.llama.config",
        visibility = [
            "//executorch/...",
            "@EXECUTORCH_CLIENTS",
        ],
    )

    # Helpers built on top of :llm_config; same visibility surface.
    runtime.python_library(
        name = "llm_config_utils",
        srcs = [
            "llm_config_utils.py",
        ],
        _is_external_target = True,
        base_module = "executorch.examples.models.llama.config",
        visibility = [
            "//executorch/...",
            "@EXECUTORCH_CLIENTS",
        ],
        deps = [
            ":llm_config",
        ],
    )
You can’t perform that action at this time.
0 commit comments