
Commit f5b502a

Thiago Crepaldi authored and facebook-github-bot committed
Rename torch.onnx.ExportOutput* to ONNXProgram* (#112263)
Summary: Since PyTorch 2.1, the torch.export API has been available, and the term "export" became overloaded because of the pre-existing torch.onnx.export API. The torch.onnx.dynamo_export API, introduced in PyTorch 2.0, exposes a torch.onnx.ExportOutput type that can now be confused with the output of torch.export.export. To remove this ambiguity and standardize naming around the new torch.export.ExportedProgram, this PR renames torch.onnx.ExportOutput to torch.onnx.ONNXProgram.

X-link: pytorch/pytorch#112263
Approved by: https://github.com/BowenBao
ghstack dependencies: #112444

Reviewed By: PaliC

Differential Revision: D51057229

fbshipit-source-id: f43c1fa8d1820ad69df61ac9f8f84d5ec3995fbe
1 parent 1e03c23 · commit f5b502a
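For context, a minimal sketch of how the rename surfaces in user code; the tiny module, tensor shapes, and output path below are illustrative and not taken from this commit:

import torch

class TinyModel(torch.nn.Module):  # illustrative model, not part of this commit
    def forward(self, x):
        return torch.nn.functional.relu(x + 1)

model = TinyModel()
example_input = torch.randn(2, 4)

# Before this change, dynamo_export returned a torch.onnx.ExportOutput;
# after it, the same call returns a torch.onnx.ONNXProgram. The methods the
# benchmark relies on (save, model_proto, the i/o adapters) keep their names.
options = torch.onnx.ExportOptions(dynamic_shapes=False)
onnx_program = torch.onnx.dynamo_export(model, example_input, export_options=options)
onnx_program.save("tiny_model.onnx")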

1 file changed: 15 additions, 15 deletions


userbenchmark/dynamo/dynamobench/common.py

Lines changed: 15 additions & 15 deletions
@@ -1527,31 +1527,31 @@ class OnnxModelFromDynamo(OnnxModel):
     def __init__(self, output_directory, model, example_inputs, dynamic_shapes: bool):
         super().__init__(output_directory, model, example_inputs, dynamic_shapes)
         self._dynamic_shapes = dynamic_shapes
-        self._export_output = self._export(model, example_inputs, self.model_path)
+        self._onnx_program = self._export(model, example_inputs, self.model_path)
         # Clear the model proto to save memory.
-        # The model proto is saved to disk and no longer needed from `export_output`.
-        # `export_output` is kept for i/o adapter usage.
-        self._export_output.model_proto.Clear()
+        # The model proto is saved to disk and no longer needed from `onnx_program`.
+        # `onnx_program` is kept for i/o adapter usage.
+        self._onnx_program.model_proto.Clear()
         self.onnx_session = self._init_ort_session(self.model_path)

     def _export(
         self, model, example_inputs, output_path: str
-    ) -> torch.onnx.ExportOutput:
+    ) -> torch.onnx.ONNXProgram:
         example_args, example_kwargs = _normalize_bench_inputs(example_inputs)
         options = torch.onnx.ExportOptions(dynamic_shapes=self._dynamic_shapes)
-        export_output = torch.onnx.dynamo_export(
+        onnx_program = torch.onnx.dynamo_export(
             model, *example_args, **example_kwargs, export_options=options
         )

-        export_output.save(output_path)
-        return export_output
+        onnx_program.save(output_path)
+        return onnx_program

     def format_pt_inputs(self, pt_inputs):
         pt_args, pt_kwargs = _normalize_bench_inputs(pt_inputs)
-        return self._export_output.adapt_torch_inputs_to_onnx(*pt_args, **pt_kwargs)
+        return self._onnx_program.adapt_torch_inputs_to_onnx(*pt_args, **pt_kwargs)

     def format_pt_outputs(self, pt_outputs):
-        return self._export_output.adapt_torch_outputs_to_onnx(pt_outputs)
+        return self._onnx_program.adapt_torch_outputs_to_onnx(pt_outputs)


 class OnnxModelFromDynamoAotInline(OnnxModelFromDynamo):
@@ -1561,10 +1561,10 @@ class OnnxModelFromDynamoAotInline(OnnxModelFromDynamo):

     def _export(
         self, model, example_inputs, output_path: str
-    ) -> torch.onnx.ExportOutput:
+    ) -> torch.onnx.ONNXProgram:
         example_args, example_kwargs = _normalize_bench_inputs(example_inputs)
         options = torch.onnx.ExportOptions(dynamic_shapes=self._dynamic_shapes)
-        export_output = torch.onnx.dynamo_export(
+        onnx_program = torch.onnx.dynamo_export(
             model, *example_args, **example_kwargs, export_options=options
         )
         # Apply AOT inline post export.
@@ -1575,12 +1575,12 @@ def _export(
         # Workaround for inliner not supporting with models larger than 2GB.
         # Save model to disk first separating out external data,
         # and load back without external data for inliner to work on.
-        model_proto = export_output.model_proto
+        model_proto = onnx_program.model_proto
         onnx.save_model(model_proto, output_path, save_as_external_data=True)
         model_proto = onnx.load(output_path, load_external_data=False)
         model_proto = onnx.inliner.inline_local_functions(model_proto)
         onnx.save_model(model_proto, output_path)
-        return export_output
+        return onnx_program


 class _OnnxPatch:
@@ -1786,7 +1786,7 @@ def run_n_iterations_onnx(model, inputs, n=2):
             return outputs
         except exporter.OnnxExporterError as e:
             # `torch.onnx.dynamo_export` raises error that encloses diagnostics.
-            diagnostic_context = e.export_output.diagnostic_context
+            diagnostic_context = e.onnx_program.diagnostic_context
             for parsed_error in parser.parse_diagnostic_context(diagnostic_context):
                 output_csv(
                     output_error_filename, parsed_error.headers, parsed_error.row
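The diff keeps `onnx_program` alive (even after clearing its model proto) because its input/output adapters bridge PyTorch calling conventions and the flat inputs the exported ONNX graph expects. A rough, illustrative sketch of that flow, reusing `onnx_program`, `model`, and `example_input` from the sketch above, assuming onnxruntime is installed and tensors live on CPU:

import onnxruntime  # the benchmark builds its session in _init_ort_session; this is a simplified stand-in

session = onnxruntime.InferenceSession("tiny_model.onnx")

# Flatten PyTorch args/kwargs into the positional inputs the exported ONNX graph expects.
onnx_inputs = onnx_program.adapt_torch_inputs_to_onnx(example_input)
ort_inputs = {
    ort_input.name: tensor.numpy()
    for ort_input, tensor in zip(session.get_inputs(), onnx_inputs)
}
ort_outputs = session.run(None, ort_inputs)

# Adapt eager PyTorch outputs to the same layout so they can be compared against ort_outputs.
torch_outputs = onnx_program.adapt_torch_outputs_to_onnx(model(example_input))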
