@@ -125,22 +125,20 @@ auto acthardtanh TRTORCH_UNUSED =
         LOG_DEBUG("Output shape: " << out_tensor->getDimensions());
         return true;
       }})
-      .pattern(
-          {"aten::leaky_relu(Tensor self, Scalar negative_slope=0.01) -> (Tensor)",
-           [](ConversionCtx* ctx, const torch::jit::Node* n, args& args) -> bool {
-             auto self = args[0].ITensorOrFreeze(ctx);
-             auto negative_slopeScalar = args[1].unwrapToScalar().to<float>();
+      .pattern({"aten::leaky_relu(Tensor self, Scalar negative_slope=0.01) -> (Tensor)",
+                [](ConversionCtx* ctx, const torch::jit::Node* n, args& args) -> bool {
+                  auto self = args[0].ITensorOrFreeze(ctx);
+                  auto negative_slopeScalar = args[1].unwrapToScalar().to<float>();
 
-             auto new_layer = ctx->net->addActivation(*self, nvinfer1::ActivationType::kLEAKY_RELU);
-             new_layer->setAlpha(negative_slopeScalar);
-
-             new_layer->setName(util::node_info(n).c_str());
-             auto out_tensor = new_layer->getOutput(0);
-             out_tensor = ctx->AssociateValueAndTensor(n->outputs()[0], out_tensor);
-             LOG_DEBUG("Output shape: " << out_tensor->getDimensions());
-             return true;
-           }});
+                  auto new_layer = ctx->net->addActivation(*self, nvinfer1::ActivationType::kLEAKY_RELU);
+                  new_layer->setAlpha(negative_slopeScalar);
 
+                  new_layer->setName(util::node_info(n).c_str());
+                  auto out_tensor = new_layer->getOutput(0);
+                  out_tensor = ctx->AssociateValueAndTensor(n->outputs()[0], out_tensor);
+                  LOG_DEBUG("Output shape: " << out_tensor->getDimensions());
+                  return true;
+                }});
 
 } // namespace
 } // namespace impl
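For reference, the converter in this hunk lowers aten::leaky_relu onto a single TensorRT activation layer: addActivation with ActivationType::kLEAKY_RELU, with setAlpha carrying the negative_slope argument. Below is a minimal standalone sketch of those same nvinfer1 calls outside of TRTorch's ConversionCtx; the logger class, input name, and dimensions are illustrative assumptions, and alpha is set to the schema's default negative_slope of 0.01.

// Minimal sketch of the nvinfer1 calls made by the converter above.
// SketchLogger, the input name "x", and the Dims4 shape are assumptions
// for illustration; they are not part of the TRTorch source.
#include <iostream>
#include "NvInfer.h"

class SketchLogger : public nvinfer1::ILogger {
  void log(Severity severity, const char* msg) noexcept override {
    if (severity <= Severity::kWARNING) {
      std::cout << msg << std::endl;
    }
  }
};

int main() {
  SketchLogger logger;
  auto builder = nvinfer1::createInferBuilder(logger);
  // Explicit-batch network, as recent TensorRT versions expect
  auto network = builder->createNetworkV2(
      1U << static_cast<uint32_t>(nvinfer1::NetworkDefinitionCreationFlag::kEXPLICIT_BATCH));

  // Stand-in for `self` in the converter: a single float input tensor
  auto x = network->addInput("x", nvinfer1::DataType::kFLOAT, nvinfer1::Dims4{1, 8, 4, 4});

  // The same two calls the converter makes: add a kLEAKY_RELU activation
  // and carry negative_slope through setAlpha (schema default 0.01)
  auto new_layer = network->addActivation(*x, nvinfer1::ActivationType::kLEAKY_RELU);
  new_layer->setAlpha(0.01f);
  new_layer->setName("leaky_relu_sketch");

  network->markOutput(*new_layer->getOutput(0));
  std::cout << "output rank: " << new_layer->getOutput(0)->getDimensions().nbDims << std::endl;
  return 0;
}

In the converter itself, ctx->net plays the role of network, and AssociateValueAndTensor records the layer output against the TorchScript node's output value so later converters can look it up.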