
Commit d30d3ac

Add buck target for hf_download (#9603)
Summary: Pull Request resolved: #9603. Reviewed By: kirklandsign. Differential Revision: D71833608

1 parent: 306b649

3 files changed: +24 -5 lines

examples/models/llama/TARGETS

Lines changed: 11 additions & 0 deletions
@@ -118,6 +118,16 @@ runtime.python_library(
     ],
 )
 
+runtime.python_library(
+    name = "hf_download",
+    srcs = [
+        "hf_download.py",
+    ],
+    deps = [
+        "fbsource//third-party/pypi/huggingface-hub:huggingface-hub",
+    ]
+)
+
 runtime.python_library(
     name = "export_library",
     srcs = [
@@ -134,6 +144,7 @@ runtime.python_library(
         "@EXECUTORCH_CLIENTS",
     ],
     deps = [
+        ":hf_download",
         ":source_transformation",
         "//ai_codesign/gen_ai/fast_hadamard_transform:fast_hadamard_transform",
         "//caffe2:torch",

examples/models/llama/export_llama_lib.py

Lines changed: 12 additions & 4 deletions
@@ -539,16 +539,24 @@ def export_llama(args) -> str:
     if not args.checkpoint and args.model in HUGGING_FACE_REPO_IDS:
         repo_id = HUGGING_FACE_REPO_IDS[args.model]
         if args.model == "qwen2_5":
-            from executorch.examples.models.qwen2_5 import convert_weights
+            from executorch.examples.models.qwen2_5 import (  # pyre-ignore[21]
+                convert_weights,
+            )
         elif args.model == "phi_4_mini":
-            from executorch.examples.models.phi_4_mini import convert_weights
+            from executorch.examples.models.phi_4_mini import (  # pyre-ignore[21]
+                convert_weights,
+            )
         elif args.model == "smollm2":
-            from executorch.examples.models.smollm2 import convert_weights
+            from executorch.examples.models.smollm2 import (  # pyre-ignore[21]
+                convert_weights,
+            )
         else:
             raise ValueError(
                 f"Converting weights to meta format for {args.model} is not yet supported"
             )
-        args.checkpoint = download_and_convert_hf_checkpoint(repo_id, convert_weights)
+        args.checkpoint = download_and_convert_hf_checkpoint(
+            repo_id, convert_weights
+        )  # pyre-ignore
 
     if args.profile_path is not None:
         try:
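Each branch above lazily imports its model's convert_weights only when that model is selected, which is why Pyre's undefined-import diagnostic (error 21) is suppressed at each import site. Purely for illustration, the same dispatch can be sketched as a table lookup; this is not the code the commit adds.

import importlib

# Hypothetical equivalent of the if/elif chain above: map each supported model to
# the module that provides its convert_weights, and import it lazily on demand.
_CONVERTERS = {
    "qwen2_5": "executorch.examples.models.qwen2_5",
    "phi_4_mini": "executorch.examples.models.phi_4_mini",
    "smollm2": "executorch.examples.models.smollm2",
}

def resolve_convert_weights(model: str):
    if model not in _CONVERTERS:
        raise ValueError(
            f"Converting weights to meta format for {model} is not yet supported"
        )
    return importlib.import_module(_CONVERTERS[model]).convert_weights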

examples/models/llama/hf_download.py

Lines changed: 1 addition & 1 deletion
@@ -33,7 +33,7 @@ def download_and_convert_hf_checkpoint(
 
     # Use repo name to name the converted file.
     model_name = repo_id.replace("/", "_")
-    converted_path = cache_dir / f"{model_name}.pth"
+    converted_path = str(cache_dir / f"{model_name}.pth")
 
     if converted_path.exists():
         print(f"✔ Using cached converted model: {converted_path}")
