
Commit 3dc9190

remove the redundant add_elementwise in layernorm.
Signed-off-by: Yu-Te Cheng <[email protected]>
1 parent 93cf6d6 commit 3dc9190

File tree

1 file changed: +0 −62 lines


core/conversion/converters/impl/layer_norm.cpp

Lines changed: 0 additions & 62 deletions
@@ -10,68 +10,6 @@ namespace converters {
 namespace impl {
 namespace {
 
-nvinfer1::ILayer* add_elementwise(
-    ConversionCtx* ctx,
-    nvinfer1::ElementWiseOperation op,
-    nvinfer1::ITensor* self,
-    nvinfer1::ITensor* other,
-    const std::string& name) {
-  // ensure self has the larger number of dimensions
-  bool swapSelfOther = false;
-  if (self->getDimensions().nbDims < other->getDimensions().nbDims) {
-    std::swap(self, other);
-    swapSelfOther = true;
-  }
-  auto selfDim = util::toVec(self->getDimensions());
-  auto otherDim = util::toVec(other->getDimensions());
-  if (selfDim.size() != otherDim.size()) {
-    // if other has a dynamic shape, expand its dimensions now and resolve its
-    // shape at runtime
-    if (otherDim.end() != std::find(otherDim.begin(), otherDim.end(), -1)) {
-      auto thOtherStaticShapeMask = torch::ones(selfDim.size(), torch::kInt32);
-      auto thOtherDynamicShapeMask = torch::zeros(selfDim.size(), torch::kInt32);
-      for (size_t start = selfDim.size() - otherDim.size(), idx = 0; idx < otherDim.size(); ++idx) {
-        if (-1 != otherDim[idx]) {
-          thOtherStaticShapeMask[start + idx] = otherDim[idx];
-        } else {
-          thOtherStaticShapeMask[start + idx] = 0;
-          thOtherDynamicShapeMask[start + idx] = 1;
-        }
-      }
-      auto otherStaticShapeMask = tensor_to_const(ctx, thOtherStaticShapeMask);
-      auto otherDynamicShapeMask = tensor_to_const(ctx, thOtherDynamicShapeMask);
-      auto selfShape = ctx->net->addShape(*self)->getOutput(0);
-      // the size of each dynamic dimension of other needs to match the size of
-      // the corresponding dimension of self
-      auto otherDynamicShape =
-          ctx->net->addElementWise(*selfShape, *otherDynamicShapeMask, nvinfer1::ElementWiseOperation::kPROD)
-              ->getOutput(0);
-      auto targetOtherShape =
-          ctx->net->addElementWise(*otherDynamicShape, *otherStaticShapeMask, nvinfer1::ElementWiseOperation::kSUM)
-              ->getOutput(0);
-
-      auto otherShuffle = ctx->net->addShuffle(*other);
-      otherShuffle->setName(std::string("Reshape other tensor to have the same nDim as self for " + name).c_str());
-      otherShuffle->setInput(1, *targetOtherShape);
-      other = otherShuffle->getOutput(0);
-    } else {
-      // other has a static shape; expand its dimensions so the two tensors
-      // have the same number of dimensions
-      auto otherShuffle = ctx->net->addShuffle(*other);
-      otherShuffle->setReshapeDimensions(util::toDimsPad(otherDim, selfDim.size()));
-      other = otherShuffle->getOutput(0);
-    }
-  }
-  if (swapSelfOther) {
-    // swap back
-    std::swap(self, other);
-    swapSelfOther = false;
-  }
-  auto ele = ctx->net->addElementWise(*self, *other, op);
-  ele->setName(name.c_str());
-  return ele;
-}
-
 auto layer_norm_registrations TRTORCH_UNUSED = RegisterNodeConversionPatterns().pattern({
     R"SIG(aten::layer_norm(Tensor input, int[] normalized_shape, Tensor? gamma, Tensor? beta,
                            float eps, bool cudnn_enabled) -> (Tensor))SIG",
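
Note on the removed helper: it aligns the ranks of two tensors before an elementwise op. When the lower-rank tensor (other) carries dynamic (-1) dimensions, the helper builds two integer masks and computes the reshape target at runtime as selfShape * dynamicMask + staticMask: each dynamic dimension takes its size from self, static dimensions keep their known values, and padded leading dimensions become 1. Below is a minimal standalone sketch of that mask arithmetic on plain integer shapes; the function name computeTargetShape is hypothetical (not part of the repository) and the sketch deliberately leaves TensorRT out.

#include <cstdint>
#include <vector>

// Hypothetical illustration of the mask arithmetic in the removed helper.
// For each dimension of the target rank: padded leading dims become 1,
// static dims keep their known size, and dynamic (-1) dims take the runtime
// size of the corresponding dim of self.
std::vector<int64_t> computeTargetShape(
    const std::vector<int64_t>& selfShape,    // concrete runtime shape of self
    const std::vector<int64_t>& otherShape) { // may contain -1 (dynamic) dims
  const size_t rank = selfShape.size();
  const size_t start = rank - otherShape.size();
  std::vector<int64_t> staticMask(rank, 1);   // mirrors torch::ones(...) above
  std::vector<int64_t> dynamicMask(rank, 0);  // mirrors torch::zeros(...) above
  for (size_t idx = 0; idx < otherShape.size(); ++idx) {
    if (otherShape[idx] != -1) {
      staticMask[start + idx] = otherShape[idx];
    } else {
      staticMask[start + idx] = 0;
      dynamicMask[start + idx] = 1;
    }
  }
  // Corresponds to the kPROD followed by kSUM elementwise layers:
  // target = selfShape * dynamicMask + staticMask
  std::vector<int64_t> target(rank);
  for (size_t i = 0; i < rank; ++i) {
    target[i] = selfShape[i] * dynamicMask[i] + staticMask[i];
  }
  return target;
}

// Example: selfShape = {8, 4, 16}, otherShape = {-1, 16}
// yields target = {1, 4, 16}: the leading dim is padded to 1, the dynamic
// dim picks up 4 from self, and the static dim keeps 16.

Dropping the per-converter copy presumably leaves the layer-norm converter calling a shared add_elementwise helper defined elsewhere in the conversion library; the diff itself does not show that call site.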
