@@ -124,7 +124,35 @@ auto acthardtanh TRTORCH_UNUSED =
                     out_tensor = ctx->AssociateValueAndTensor(n->outputs()[0], out_tensor);
                     LOG_DEBUG("Output shape: " << out_tensor->getDimensions());
                     return true;
-                  }});
+                  }})
+        .pattern({"aten::leaky_relu(Tensor self, Scalar negative_slope=0.01) -> (Tensor)",
+                  [](ConversionCtx* ctx, const torch::jit::Node* n, args& args) -> bool {
+                    auto self = args[0].ITensorOrFreeze(ctx);
+                    auto negative_slopeScalar = args[1].unwrapToScalar().to<float>();
+
+                    auto new_layer = ctx->net->addActivation(*self, nvinfer1::ActivationType::kLEAKY_RELU);
+                    new_layer->setAlpha(negative_slopeScalar);
+
+                    new_layer->setName(util::node_info(n).c_str());
+                    auto out_tensor = new_layer->getOutput(0);
+                    out_tensor = ctx->AssociateValueAndTensor(n->outputs()[0], out_tensor);
+                    LOG_DEBUG("Output shape: " << out_tensor->getDimensions());
+                    return true;
+                  }})
+        .pattern({"aten::leaky_relu_(Tensor(a!) self, Scalar negative_slope=0.01) -> Tensor(a!)",
+                  [](ConversionCtx* ctx, const torch::jit::Node* n, args& args) -> bool {
+                    auto self = args[0].ITensorOrFreeze(ctx);
+                    auto negative_slopeScalar = args[1].unwrapToScalar().to<float>();
+
+                    auto new_layer = ctx->net->addActivation(*self, nvinfer1::ActivationType::kLEAKY_RELU);
+                    new_layer->setAlpha(negative_slopeScalar);
+
+                    new_layer->setName(util::node_info(n).c_str());
+                    auto out_tensor = new_layer->getOutput(0);
+                    out_tensor = ctx->AssociateValueAndTensor(n->outputs()[0], out_tensor);
+                    LOG_DEBUG("Output shape: " << out_tensor->getDimensions());
+                    return true;
+                  }});
 
 } // namespace
 } // namespace impl
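
For reference, a minimal standalone sketch of the TensorRT call both new converters rely on. LeakyReLU computes f(x) = x for x >= 0 and f(x) = negative_slope * x for x < 0, and kLEAKY_RELU exposes the slope through the activation layer's alpha parameter. This is a sketch under stated assumptions, not part of the commit; the helper name add_leaky_relu is hypothetical.

#include <NvInfer.h>

// Sketch only: assumes a TensorRT version that provides
// ActivationType::kLEAKY_RELU. add_leaky_relu is a hypothetical helper.
nvinfer1::ITensor* add_leaky_relu(nvinfer1::INetworkDefinition& net,
                                  nvinfer1::ITensor& input,
                                  float negative_slope) {
  // kLEAKY_RELU evaluates x for x >= 0 and alpha * x for x < 0,
  // so PyTorch's negative_slope maps directly onto alpha.
  auto* layer = net.addActivation(input, nvinfer1::ActivationType::kLEAKY_RELU);
  layer->setAlpha(negative_slope);
  return layer->getOutput(0);
}

Note that the two patterns share the same body: TensorRT networks are functional, so the in-place aten::leaky_relu_ variant lowers to the same out-of-place activation layer, and AssociateValueAndTensor rebinds the node's output so downstream ops read the converted tensor.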