Diff summary: 10 files changed, +143 −0 lines.
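This diff adds `tanh` support to the ExecuTorch XNNPACK delegate end to end: an operator visitor that serializes `aten.tanh.default` into the delegate payload, partitioner configs so the op is claimed for delegation, an `XNNTanh` member in both flatbuffer schemas and their Python mirror, a runtime `defineTanhNode` that calls XNNPACK's `xnn_define_tanh`, and FP32/FP16 operator tests.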
Operator-visitor registry (the `__init__` that imports each visitor module) — register the new `op_tanh` visitor:

```diff
     op_static_constant_pad,
     op_static_resize_bilinear_2d,
     op_sub,
+    op_tanh,
     op_to_copy,
 )
```
New file — the tanh node visitor:

```python
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

from typing import Dict

import torch
from executorch.backends.xnnpack.operators.node_visitor import (
    NodeVisitor,
    register_node_visitor,
)
from executorch.backends.xnnpack.serialization.xnnpack_graph_schema import (
    XNNGraph,
    XNNTanh,
    XNode,
)
from executorch.backends.xnnpack.utils.utils import get_input_node


@register_node_visitor
class TanhVisitor(NodeVisitor):
    target = "aten.tanh.default"

    def __init__(self, *args) -> None:
        super().__init__(*args)

    def define_node(
        self,
        node: torch.fx.Node,
        xnn_graph: XNNGraph,
        vals_to_ids: Dict[torch.fx.Node, int],
        debug_handle: int,
    ) -> None:
        self.define_nodes_tensor_inputs_outputs(node, xnn_graph, vals_to_ids)

        # input
        input_id = vals_to_ids[get_input_node(node, 0)]

        # output
        output_id = vals_to_ids[node]

        ser_node = XNode(
            xnode_union=XNNTanh(
                input_id=input_id,
                output_id=output_id,
                flags=0,
            ),
            debug_handle=debug_handle,
        )
        xnn_graph.xnodes.append(ser_node)
```
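`register_node_visitor` registers the class under its `target` string, so importing the module (via the `op_tanh` entry above) is enough to route every partitioned `aten.tanh.default` node through `define_node`. Like log, tanh is a unary elementwise op, so the serialized node needs only an input id, an output id, and a flags field.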
Partitioner-config registry — import `TanhConfig` and append it to the list of all partitioner configs:

```diff
     SoftmaxConfig,
     SquareRootConfig,
     SubConfig,
+    TanhConfig,
     ToDimOrderCopyConfig,
     UpsampleBilinear2dConfig,
 )
```

```diff
     PreluConfig,
     ReciprocalSquareRootConfig,
     ReLUConfig,
+    TanhConfig,
     ToDimOrderCopyConfig,
     # SDPAConfig, TODO: D60553559: preserving SDPA for fairseq fails
     SigmoidConfig,
```
The config itself, alongside the other generic unary-op configs:

```diff
@@ -363,6 +363,11 @@ class LogConfig(GenericNodePartitionerConfig):
     def supported_precision_types(self) -> List[ConfigPrecisionType]:
         return [ConfigPrecisionType.FP32]
 
+class TanhConfig(GenericNodePartitionerConfig):
+    target_name = "tanh.default"
+
+    def supported_precision_types(self) -> List[ConfigPrecisionType]:
+        return [ConfigPrecisionType.FP32]
 
 
 class ToDimOrderCopyConfig(GenericNodePartitionerConfig):
     target_name = "_to_dim_order_copy.default"
```
The op is also appended to the supported-ops list used elsewhere in the partitioner:

```diff
     exir_ops.edge.aten.addmm.default,  # TODO(T163877189) add constraint for addmm
     exir_ops.edge.aten.rsqrt.default,
     exir_ops.edge.aten.log.default,
+    exir_ops.edge.aten.tanh.default,
 ]
 
 
 SUPPORTED_MODULES = [
```
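This flat allow-list of delegatable edge ops sits next to a parallel `SUPPORTED_MODULES` list and appears to serve an older partitioner path, kept in sync with the config-based registry above.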
Runtime graph compiler — `defineTanhNode` translates the serialized node into an XNNPACK subgraph node, and the dispatch table gains a matching entry:

```diff
@@ -1448,6 +1448,36 @@ Error defineLogNode(
   return Error::Ok;
 }
 
+/*
+Define serialized tanh node into the subgraph, using the remapped ids
+to map the serialized ids to the new ids generated when defining the
+tensor value.
+*/
+Error defineTanhNode(
+    xnn_subgraph_t subgraph_ptr,
+    const std::unordered_map<uint32_t, uint32_t>& remapped_ids,
+    const NodePtr node,
+    const fb_xnnpack::XNNGraph* graph) noexcept {
+  MAYBE_UNUSED(graph);
+
+  auto graph_node = node->xnode_union_as_XNNTanh();
+
+  xnn_status status = xnn_define_tanh(
+      subgraph_ptr,
+      remapped_ids.at(graph_node->input_id()),
+      remapped_ids.at(graph_node->output_id()),
+      graph_node->flags());
+
+  ET_CHECK_OR_RETURN_ERROR(
+      status == xnn_status_success,
+      Internal,
+      "Failed to create tanh node %i with code: %s",
+      node->debug_handle(),
+      xnn_status_to_string(status));
+
+  return Error::Ok;
+}
+
 /*
 Define serialized ceiling node into the subgraph, using the remapped ids
 to map the serialized ids, to the new ids generated when defining the
@@ -2012,6 +2042,7 @@ DefineNodeFunc getDefineNodeFunc(fb_xnnpack::XNodeUnion nodeType) {
     _DEFINE(Hardswish)
     _DEFINE(LeakyReLU)
     _DEFINE(Log)
+    _DEFINE(Tanh)
     _DEFINE(Maximum)
     _DEFINE(Negate)
     _DEFINE(Square)
```
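`_DEFINE(Tanh)` presumably expands to a case that maps the `fb_xnnpack::XNodeUnion::XNNTanh` tag to `&defineTanhNode`, so adding the union member below is all the dispatcher needs to reach the new function.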
Serialization schema — `XNNTanh` reuses the generic single-input/single-output node table:

```diff
@@ -140,6 +140,7 @@ union XNodeUnion {
   XNNConvTranspose2d: _XNNNodeConv,
   XNNReciprocalSquareRoot: _XNNNode1x1,
   XNNLog: _XNNNode1x1,
+  XNNTanh: _XNNNode1x1,
 }
 
 union XValueUnion {
```
The same entry goes into the runtime copy of the schema:

```diff
@@ -136,6 +136,7 @@ union XNodeUnion {
   XNNConvTranspose2d: _XNNNodeConv,
   XNNReciprocalSquareRoot: _XNNNode1x1,
   XNNLog: _XNNNode1x1,
+  XNNTanh: _XNNNode1x1,
 }
 
 union XValueUnion {
```
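The two `.fbs` files look like deliberately parallel copies, one consumed at ahead-of-time serialization and one compiled into the runtime, so a new union member has to be appended to both at the same position: flatbuffer union tags are assigned by declaration order, and a mismatch would make the runtime decode the wrong node type.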
Python mirror of the schema — the matching dataclass plus its union registration:

```diff
@@ -314,6 +314,11 @@ class XNNLog(XNNNode1x1):
     pass
 
 
+@dataclass
+class XNNTanh(XNNNode1x1):
+    pass
+
+
 @dataclass
 class XNNMaximum(XNNNode2x1):
     pass
```

```diff
@@ -385,6 +390,7 @@ class XNNScaledDotProductAttention:
     XNNBatchMatrixMultiply,
     XNNReciprocalSquareRoot,
     XNNLog,
+    XNNTanh,
 ]
```
New file — operator tests covering FP32 and FP16 inputs:

```python
# Copyright (c) Meta Platforms, Inc. and affiliates.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

import unittest

import torch
from executorch.backends.xnnpack.test.tester import Tester


class TestTanh(unittest.TestCase):
    def setUp(self):
        torch._dynamo.reset()

    class Tanh(torch.nn.Module):
        def __init__(self):
            super().__init__()

        def forward(self, x):
            return torch.tanh(x)

    def run_tanh_test(self, inputs):
        (
            Tester(self.Tanh(), inputs)
            .export()
            .check_count({"torch.ops.aten.tanh.default": 1})
            .to_edge_transform_and_lower()
            .check_count({"torch.ops.higher_order.executorch_call_delegate": 1})
            .check_not(["executorch_exir_dialects_edge__ops_aten_tanh_default"])
            .to_executorch()
            .serialize()
            .run_method_and_compare_outputs()
        )

    def test_fp16_tanh(self):
        inputs = (torch.randn(20).to(torch.float16),)
        self.run_tanh_test(inputs)

    def test_fp32_tanh(self):
        inputs = (torch.randn(20),)
        self.run_tanh_test(inputs)
```
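The tests assert the full contract: the exported graph contains exactly one `aten.tanh.default`, lowering leaves no tanh in the edge dialect (it is swallowed by a single `executorch_call_delegate`), and the serialized program matches eager-mode outputs. Outside the test harness, the same flow looks roughly like the sketch below — a minimal example assuming the public `torch.export` / `to_edge_transform_and_lower` / `XnnpackPartitioner` APIs, whose import paths may vary between ExecuTorch releases:

```python
import torch

# Assumed public APIs; exact import paths may differ by ExecuTorch version.
from executorch.backends.xnnpack.partition.xnnpack_partitioner import XnnpackPartitioner
from executorch.exir import to_edge_transform_and_lower


class TanhModule(torch.nn.Module):
    def forward(self, x):
        return torch.tanh(x)


# Export to an ATen graph containing aten.tanh.default, let the XNNPACK
# partitioner claim it, then emit an ExecuTorch program in which tanh
# runs inside the delegate.
exported = torch.export.export(TanhModule(), (torch.randn(20),))
edge = to_edge_transform_and_lower(exported, partitioner=[XnnpackPartitioner()])
executorch_program = edge.to_executorch()
```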