@@ -626,7 +626,7 @@ def forward(self, x_raw, h, c):
             ),
         )

-        new_res = program_with_delegates.exported_program().module()(*inputs)
+        new_res = program_with_delegates.exported_program()(*inputs)
         for t1, t2 in zip(new_res, orig_res, strict=True):
             self.assertTrue(torch.allclose(t1, t2, atol=1e-03, rtol=1e-03))

@@ -745,7 +745,7 @@ def forward(self, x_raw, h, c):
             HTAPartitionerOnePatternDemo()
         )

-        new_res = traced_with_delegate.exported_program().module()(*inputs)
+        new_res = traced_with_delegate.exported_program()(*inputs)
         for t1, t2 in zip(new_res, orig_res, strict=True):
             self.assertTrue(torch.allclose(t1, t2, atol=1e-03, rtol=1e-03))

@@ -768,7 +768,7 @@ def forward(self, x_raw, h, c):
         #     config=exir.ExecutorchBackendConfig(extract_delegate_segments=extract_delegate_segments),
         # )

-        new_res = program_with_delegates.exported_program().module()(*inputs)
+        new_res = program_with_delegates.exported_program()(*inputs)
         for t1, t2 in zip(new_res, orig_res, strict=True):
             self.assertTrue(torch.allclose(t1, t2, atol=1e-03, rtol=1e-03))

@@ -1029,7 +1029,7 @@ def f(x, y):
         partitioned = orig
         partitioned = partitioned.to_backend(AddMulPartitionerDemo())

-        new_res = partitioned.exported_program().module()(*inputs)
+        new_res = partitioned.exported_program()(*inputs)
         self.assertTrue(torch.allclose(orig_res, new_res[0]))

         toplevel_lowered = get_lowered_submodules(
@@ -1102,7 +1102,7 @@ def f(xs, y):
             map_fn_lowered[0][1].original_module.graph_module.code
         )

-        new_res = partitioned.exported_program().module()(*inputs)
+        new_res = partitioned.exported_program()(*inputs)

         self.assertTrue(torch.allclose(orig_res, new_res[0]))

@@ -1153,7 +1153,7 @@ def f(xs, pred1, pred2, y):
         partitioned = orig
         partitioned = partitioned.to_backend(AddMulPartitionerDemo())

-        new_res = partitioned.exported_program().module()(*inputs)
+        new_res = partitioned.exported_program()(*inputs)
         self.assertTrue(torch.allclose(orig_res, new_res[0]))

         toplevel_lowered = get_lowered_submodules(
@@ -1224,7 +1224,7 @@ def forward(self, x: List[torch.Tensor]):
                 return self.lowered(x)

         gm = to_edge(export(ComposedM(), inputs))
-        gm.exported_program().module()(*inputs)
+        gm.exported_program()(*inputs)

     def test_dict_input(self):
         def f(x: Dict[str, torch.Tensor]):
@@ -1246,4 +1246,4 @@ def forward(self, x: List[torch.Tensor]):
                 return self.lowered(x)

         gm = to_edge(export(ComposedM(), inputs))
-        gm.exported_program().module()(*inputs)
+        gm.exported_program()(*inputs)