
Commit 9bed75d

up

1 parent ce6b865
3 files changed: +3 −2 lines changed

examples/models/llama/export_llama_lib.py

Lines changed: 1 addition & 0 deletions
@@ -763,6 +763,7 @@ def _to_edge_and_lower_llama_xnnpack(
         raise NotImplementedError(
             "export_llama does not support XNNPack and generating ETRecord at the moment."
         )
+
     builder = builder_exported.pt2e_quantize(quantizers).to_edge_transform_and_lower(
         partitioners
     )
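
For context, the line added above sits at the end of the XNNPack lowering path, where the exported builder is quantized and lowered in a single chain. A minimal sketch of that flow, assuming a function shape, argument names, and ETRecord guard condition that the diff does not show:

# Sketch only: the real _to_edge_and_lower_llama_xnnpack takes more
# parameters and does more setup than this hunk shows.
def _to_edge_and_lower_llama_xnnpack(
    builder_exported, quantizers, partitioners, generate_etrecord=False
):
    # Assumed guard: the diff confirms only that this error is raised
    # when ETRecord generation is requested together with XNNPack.
    if generate_etrecord:
        raise NotImplementedError(
            "export_llama does not support XNNPack and generating ETRecord at the moment."
        )

    # Quantize the exported program with the PT2E quantizers, then lower
    # it to the Edge dialect and delegate matching subgraphs to the
    # XNNPack partitioners, all in one builder chain.
    builder = builder_exported.pt2e_quantize(quantizers).to_edge_transform_and_lower(
        partitioners
    )
    return builder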

examples/models/llama/model.py

Lines changed: 1 addition & 1 deletion
@@ -16,6 +16,7 @@
     get_default_model_resource_dir,
 )
 from executorch.examples.models.llama.llama_transformer import Transformer
+
 from executorch.examples.models.llama.model_args import ModelArgs
 from torchao.utils import TorchAOBaseTensor

@@ -101,7 +102,6 @@ def __init__(self, **kwargs):
         if fairseq2_checkpoint:
             print("Using fairseq2 checkpoint")
             checkpoint = convert_to_llama_checkpoint(checkpoint=checkpoint)
-            print("checkpoint", checkpoint)
         if "model" in checkpoint:
             # NB: some checkpoint contains a "model" field, which is the actual weights dict
             checkpoint = checkpoint["model"]

extension/llm/export/builder.py

Lines changed: 1 addition & 1 deletion
@@ -31,6 +31,7 @@
 from executorch.exir.passes import MemoryPlanningPass
 from executorch.exir.passes.quant_fusion_pass import QuantFusionPass
 from executorch.exir.passes.sym_shape_eval_pass import ConstraintBasedSymShapeEvalPass
+
 from executorch.extension.export_util.utils import export_to_edge, save_pte_program

 from executorch.extension.llm.export.export_passes import RemoveRedundantTransposes

@@ -231,7 +232,6 @@ def _export(self, module: Optional[torch.nn.Module] = None) -> ExportedProgram:
             logging.info("Re-exporting with:")
         else:
             logging.info("Exporting with:")
-
         logging.info(f"inputs: {self.example_inputs}")
         logging.info(f"kwargs: {self.example_kwarg_inputs}")
         logging.info(f"dynamic shapes: {dynamic_shape}")
