Commit 36a5bc6

jackzhxng authored and facebook-github-bot committed
Revert export_for_training migration in llm/export/builder.py (#6029)
Summary: Have Llama export working until pytorch/pytorch#137540 is fixed

Pull Request resolved: #6029
Reviewed By: yushangdi
Differential Revision: D64078126
Pulled By: dvorjackz
fbshipit-source-id: 05ced734f19f541835426fd24290c9b2a9e1346c
1 parent 566902b commit 36a5bc6

File tree

1 file changed: +3 −3 lines changed

extension/llm/export/builder.py

Lines changed: 3 additions & 3 deletions
@@ -29,10 +29,10 @@
 
 from executorch.extension.export_util.utils import export_to_edge, save_pte_program
 from executorch.extension.llm.tokenizer.utils import get_tokenizer
+from torch._export import capture_pre_autograd_graph
 from torch.ao.quantization.quantize_pt2e import convert_pt2e, prepare_pt2e
 from torch.ao.quantization.quantizer import Quantizer
 from torch.ao.quantization.quantizer.composable_quantizer import ComposableQuantizer
-from torch.export import export_for_training
 from torch.nn.attention import SDPBackend
 
 FORMAT = "[%(levelname)s %(asctime)s %(filename)s:%(lineno)s] %(message)s"
@@ -190,9 +190,9 @@ def capture_pre_autograd_graph(self) -> "LLMEdgeManager":
                     strict=True,
                 ).module()
             else:
-                self.pre_autograd_graph_module = export_for_training(
+                self.pre_autograd_graph_module = capture_pre_autograd_graph(
                     self.model, self.example_inputs, dynamic_shapes=dynamic_shape
-                ).module()
+                )
 
         return self