@@ -19,12 +19,20 @@ def forward(self, x):
             z = torch.abs(x)
             return z
 
-    def _test_abs(self, inputs):
-        (
+    def _test_abs(self, inputs, legacy_mode: bool = False):
+        tester = (
             Tester(self.Abs(), inputs)
             .export()
             .check_count({"torch.ops.aten.abs.default": 1})
-            .to_edge_transform_and_lower()
+        )
+
+        if legacy_mode:
+            tester = tester.to_edge().partition()
+        else:
+            tester = tester.to_edge_transform_and_lower()
+
+        (
+            tester
             .check_count({"torch.ops.higher_order.executorch_call_delegate": 1})
             .check_not(["executorch_exir_dialects_edge__ops_aten_abs_default"])
             .to_executorch()
@@ -41,7 +49,18 @@ def test_fp16_abs(self):
                 ],
             ).to(torch.float16),
         )
-        self._test_abs(inputs)
+        self._test_abs(inputs, legacy_mode=False)
+
+    def test_fp16_abs_legacy_mode(self):
+        inputs = (
+            torch.Tensor(
+                [
+                    [0.0, 0.1, 0.5, 0.499],
+                    [-0.6, -0.4, 100.1, -1000.1],
+                ],
+            ).to(torch.float16),
+        )
+        self._test_abs(inputs, legacy_mode=True)
 
     def test_fp32_abs(self):
         inputs = (
@@ -52,4 +71,15 @@ def test_fp32_abs(self):
                 ],
             ),
         )
-        self._test_abs(inputs)
+        self._test_abs(inputs, legacy_mode=False)
+
+    def test_fp32_abs_legacy_mode(self):
+        inputs = (
+            torch.Tensor(
+                [
+                    [0.0, 0.1, 0.5, 0.499],
+                    [-0.6, -0.4, 100.1, -1000.1],
+                ],
+            ),
+        )
+        self._test_abs(inputs, legacy_mode=True)
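
For context, a minimal standalone sketch of the two lowering paths the new legacy_mode flag selects between, using only the Tester calls that appear in this diff; the Tester import path is an assumption based on the ExecuTorch source layout:

    import torch
    # Import path assumed from the ExecuTorch tree layout, not shown in this diff.
    from executorch.backends.xnnpack.test.tester import Tester

    class Abs(torch.nn.Module):
        def forward(self, x):
            return torch.abs(x)

    inputs = (torch.randn(2, 4),)

    # New flow: transform and lower to the XNNPACK delegate in a single pass.
    Tester(Abs(), inputs).export().to_edge_transform_and_lower()

    # Legacy flow: convert to the edge dialect first, then partition separately.
    Tester(Abs(), inputs).export().to_edge().partition()

Splitting the fluent chain into a tester variable, as the diff does, lets one helper exercise both paths while the delegate-count and to_executorch checks stay shared.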