@@ -358,12 +358,12 @@ def test_splitter(splitter):
 
         test_splitter(splitter)
 
-    def test_min_block_size(self):
+    def test_min_acc_module_size(self):
         """
             sin     relu    cos     sigmoid    tanh
         a ====> b =====> c ====> d ========> e =====> f
 
-        We set sin, cos and tanh as acc node but also set min_block_size to 2
+        We set sin, cos and tanh as acc node but also set min_acc_module_size to 2
         and expect the whole module stay on CPU.
         """
 
@@ -386,9 +386,9 @@ class CustomOpSupport(op_support.OperatorSupport):
                 "acc_ops.tanh": None,
             }
 
-        # Create splitter setting and set min_block_size to 2
+        # Create splitter setting and set min_acc_module_size to 2
         settings = splitter_base._SplitterSettingBase()
-        settings.min_block_size = 2
+        settings.min_acc_module_size = 2
         splitter = TRTSplitter(
             mod,
             (torch.randn(2, 3),),
@@ -815,7 +815,7 @@ def test_split_non_tensor_edges_2(self):
         # Making 'a', 'b1', 'b2', 'd' and 'e' run on ACC with limit on ACC
         # subgraph size
         settings = splitter_base._SplitterSettingBase()
-        settings.min_block_size = 2
+        settings.min_acc_module_size = 2
         splitter = TRTSplitter(
             module_nn,
             (test_data,),
@@ -912,7 +912,7 @@ def test_split_non_tensor_edges_4(self):
         # Making 'a', 'c', 'd' and 'e' run on ACC with limit on ACC
         # subgraph size
         settings = splitter_base._SplitterSettingBase()
-        settings.min_block_size = 2
+        settings.min_acc_module_size = 2
         splitter = TRTSplitter(
             module_nn,
             (test_data,),
@@ -1072,7 +1072,7 @@ def test_start_with_acc_module_(self):
             sin     relu    cos     sigmoid    tanh
         a ====> b =====> c ====> d ========> e =====> f
 
-        We set sin, relu and cos as acc node but also set min_block_size to 2
+        We set sin, relu and cos as acc node but also set min_acc_module_size to 2
         and expect the whole module stay on CPU.
         """
 
@@ -1095,9 +1095,9 @@ class CustomOpSupport(op_support.OperatorSupport):
                 "acc_ops.relu": None,
             }
 
-        # Create splitter setting and set min_block_size to 2
+        # Create splitter setting and set min_acc_module_size to 2
         settings = splitter_base._SplitterSettingBase()
-        settings.min_block_size = 2
+        settings.min_acc_module_size = 2
         splitter = TRTSplitter(
             mod,
             (torch.randn(2, 3),),
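
For reference, the renamed knob pulled together in one self-contained sketch. This is a minimal approximation, not the test itself: the TRTSplitter import path and positional argument order are assumptions (both have moved across releases), and the module here is symbolic-traced only to keep the sketch runnable, whereas the real tests acc-trace so that node targets become acc_ops.*. The _SplitterSettingBase, min_acc_module_size, and TRTSplitter calls are taken from the hunks above.

    import torch
    from torch.fx.passes import operator_support as op_support, splitter_base
    # Import path is an assumption; TRTSplitter has lived in several modules.
    from torch.fx.experimental.fx2trt.tools.trt_splitter import TRTSplitter

    class TestModule(torch.nn.Module):
        def forward(self, x):
            return torch.tanh(torch.cos(torch.sin(x)))

    # Stand-in for the acc-traced GraphModule used in the tests above.
    mod = torch.fx.symbolic_trace(TestModule())

    class CustomOpSupport(op_support.OperatorSupport):
        # Only these acc ops count as accelerator-supported,
        # mirroring the support dicts shown in the hunks.
        _support_dict = {
            "acc_ops.sin": None,
            "acc_ops.cos": None,
            "acc_ops.tanh": None,
        }

    # Require at least 2 supported nodes per accelerator subgraph;
    # smaller islands fall back to CPU, which is what the tests assert.
    settings = splitter_base._SplitterSettingBase()
    settings.min_acc_module_size = 2

    splitter = TRTSplitter(
        mod,
        (torch.randn(2, 3),),
        CustomOpSupport(),  # operator-support argument; position assumed
        settings,
    )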