Skip to content

Commit a446005

Browse files
committed
enable aoti for preprocess ci
ghstack-source-id: b414e24
Pull Request resolved: #6553
1 parent d34bd1e commit a446005

File tree

2 files changed

+14
-21
lines changed

2 files changed

+14
-21
lines changed

examples/models/llama3_2_vision/preprocess/export_preprocess.py

Lines changed: 9 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -24,29 +24,22 @@ def main():
2424
strict=False,
2525
)
2626

27-
# Executorch
27+
# AOTInductor. Note: export AOTI before ExecuTorch, as
28+
# ExecuTorch will modify the ExportedProgram.
29+
torch._inductor.aot_compile(
30+
ep.module(),
31+
model.get_example_inputs(),
32+
options={"aot_inductor.output_path": "preprocess_aoti.so"},
33+
)
34+
35+
# Executorch.
2836
edge_program = to_edge(
2937
ep, compile_config=EdgeCompileConfig(_check_ir_validity=False)
3038
)
3139
et_program = edge_program.to_executorch()
3240
with open("preprocess_et.pte", "wb") as file:
3341
et_program.write_to_file(file)
3442

35-
# Export.
36-
# ep = torch.export.export(
37-
# model.get_eager_model(),
38-
# model.get_example_inputs(),
39-
# dynamic_shapes=model.get_dynamic_shapes(),
40-
# strict=False,
41-
# )
42-
#
43-
# # AOTInductor
44-
# torch._inductor.aot_compile(
45-
# ep.module(),
46-
# model.get_example_inputs(),
47-
# options={"aot_inductor.output_path": "preprocess_aoti.so"},
48-
# )
49-
5043

5144
if __name__ == "__main__":
5245
main()

examples/models/llama3_2_vision/preprocess/test_preprocess.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -265,11 +265,11 @@ def run_preprocess(
265265
), f"Executorch model: expected {reference_ar} but got {et_ar.tolist()}"
266266

267267
# Run aoti model and check it matches reference model.
268-
# aoti_path = models["aoti_path"]
269-
# aoti_model = torch._export.aot_load(aoti_path, "cpu")
270-
# aoti_image, aoti_ar = aoti_model(image_tensor, inscribed_size, best_resolution)
271-
# self.assertTrue(torch.allclose(reference_image, aoti_image))
272-
# self.assertEqual(reference_ar, aoti_ar.tolist())
268+
aoti_path = models["aoti_path"]
269+
aoti_model = torch._export.aot_load(aoti_path, "cpu")
270+
aoti_image, aoti_ar = aoti_model(image_tensor, inscribed_size, best_resolution)
271+
self.assertTrue(torch.allclose(reference_image, aoti_image))
272+
self.assertEqual(reference_ar, aoti_ar.tolist())
273273

274274
# This test setup mirrors the one in torchtune:
275275
# https://github.com/pytorch/torchtune/blob/main/tests/torchtune/models/clip/test_clip_image_transform.py

0 commit comments

Comments (0)