Skip to content

Commit 7159650

Browse files
authored
Add buck target for hf_download
Differential Revision: D71833608
Pull Request resolved: #9603
1 parent 2f65c3a commit 7159650

File tree

3 files changed

+24
-7
lines changed

3 files changed

+24
-7
lines changed

examples/models/llama/TARGETS

Lines changed: 11 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -118,6 +118,16 @@ runtime.python_library(
118118
],
119119
)
120120

121+
runtime.python_library(
122+
name = "hf_download",
123+
srcs = [
124+
"hf_download.py",
125+
],
126+
deps = [
127+
"fbsource//third-party/pypi/huggingface-hub:huggingface-hub",
128+
]
129+
)
130+
121131
runtime.python_library(
122132
name = "export_library",
123133
srcs = [
@@ -134,6 +144,7 @@ runtime.python_library(
134144
"@EXECUTORCH_CLIENTS",
135145
],
136146
deps = [
147+
":hf_download",
137148
":source_transformation",
138149
"//ai_codesign/gen_ai/fast_hadamard_transform:fast_hadamard_transform",
139150
"//caffe2:torch",

examples/models/llama/export_llama_lib.py

Lines changed: 9 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -539,11 +539,17 @@ def export_llama(args) -> str:
539539
if not args.checkpoint and args.model in HUGGING_FACE_REPO_IDS:
540540
repo_id = HUGGING_FACE_REPO_IDS[args.model]
541541
if args.model == "qwen2_5":
542-
from executorch.examples.models.qwen2_5 import convert_weights
542+
from executorch.examples.models.qwen2_5 import ( # pyre-ignore[21]
543+
convert_weights,
544+
)
543545
elif args.model == "phi_4_mini":
544-
from executorch.examples.models.phi_4_mini import convert_weights
546+
from executorch.examples.models.phi_4_mini import ( # pyre-ignore[21]
547+
convert_weights,
548+
)
545549
elif args.model == "smollm2":
546-
from executorch.examples.models.smollm2 import convert_weights
550+
from executorch.examples.models.smollm2 import ( # pyre-ignore[21]
551+
convert_weights,
552+
)
547553
else:
548554
raise ValueError(
549555
f"Converting weights to meta format for {args.model} is not yet supported"

examples/models/llama/hf_download.py

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -36,8 +36,8 @@ def download_and_convert_hf_checkpoint(
3636
converted_path = cache_dir / f"{model_name}.pth"
3737

3838
if converted_path.exists():
39-
print(f"✔ Using cached converted model: {converted_path}")
40-
return converted_path
39+
print(f"✔ Using cached converted model: {str(converted_path)}")
40+
return str(converted_path)
4141

4242
# 1. Download weights from Hugging Face.
4343
print("⬇ Downloading and converting checkpoint...")
@@ -46,5 +46,5 @@ def download_and_convert_hf_checkpoint(
4646
)
4747

4848
# 2. Convert weights to Meta format.
49-
convert_weights(checkpoint_path, converted_path)
50-
return converted_path
49+
convert_weights(checkpoint_path, str(converted_path))
50+
return str(converted_path)

0 commit comments

Comments (0)