
Commit 3be558d

Merge pull request #87 from Xilinx/sayeddla.new_tosa_verifiers
new tosa verifiers
2 parents 20684a4 + 6cd0a91 commit 3be558d

4 files changed: +202 −10 lines changed

mlir/include/mlir/Dialect/Tosa/IR/TosaOps.td

Lines changed: 10 additions & 3 deletions
@@ -1429,15 +1429,14 @@ def Tosa_ConcatOp : Tosa_Op<"concat", [
     static bool isCompatibleReturnTypes(TypeRange l, TypeRange r);
   }];
   let hasFolder = 1;
+  let hasVerifier = 1;
 }
 
 //===----------------------------------------------------------------------===//
 // Operator: pad
 //===----------------------------------------------------------------------===//
 def Tosa_PadOp : Tosa_Op<"pad", [
-    DeclareOpInterfaceMethods<InferShapedTypeOpInterface,
-                              ["inferReturnTypeComponents"]>,
-    Pure]> {
+    InferTensorType, Pure]> {
   let summary = "Pads a tensor with value specified.";
 
   let description = [{
@@ -1476,6 +1475,14 @@ def Tosa_PadOp : Tosa_Op<"pad", [
 
   let hasCanonicalizer = 1;
   let hasFolder = 1;
+  let hasVerifier = 1;
+
+  let extraClassDeclaration = [{
+    /// Returns true when two result types are compatible for this op;
+    /// Method used by InferTypeOpInterface.
+    static bool isCompatibleReturnTypes(TypeRange l, TypeRange r);
+  }];
+
 }
 
 //===----------------------------------------------------------------------===//
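Two things happen in the ODS changes above. First, "let hasVerifier = 1;" tells mlir-tblgen to declare a C++ verify() hook on the generated op classes; the hand-written bodies are added in TosaOps.cpp below. Second, swapping DeclareOpInterfaceMethods<InferShapedTypeOpInterface, ...> for InferTensorType means the inferred result of tosa.pad must carry an element type as well as a shape, which is why inferReturnTypeComponents below now threads the input element type into ShapedTypeComponents. A minimal sketch of that distinction, assuming an MLIR build; the shapedComponentsSketch helper is illustrative and not part of the patch:

// Illustrative only: a ShapedTypeComponents built from a bare shape carries no
// element type, while the (shape, elementType) form used by the updated
// PadOp::inferReturnTypeComponents carries both, which is what InferTensorType
// needs to rebuild a full tensor type for the result.
#include <cassert>

#include "mlir/IR/Builders.h"
#include "mlir/IR/MLIRContext.h"
#include "mlir/Interfaces/InferTypeOpInterface.h"
#include "llvm/ADT/SmallVector.h"

static void shapedComponentsSketch(mlir::MLIRContext *ctx) {
  mlir::Builder b(ctx);
  llvm::SmallVector<int64_t> shape = {13, 21, 3};

  mlir::ShapedTypeComponents shapeOnly(shape);
  assert(!shapeOnly.getElementType() && "no element type attached");

  mlir::ShapedTypeComponents shapeAndType(shape, b.getF32Type());
  assert(shapeAndType.getElementType().isF32());
  (void)shapeOnly;
  (void)shapeAndType;
}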

mlir/lib/Dialect/Tosa/IR/TosaOps.cpp

Lines changed: 74 additions & 3 deletions
@@ -503,6 +503,41 @@ LogicalResult tosa::ConcatOp::inferReturnTypeComponents(
   return success();
 }
 
+LogicalResult ConcatOp::verify() {
+  OperandRange inputs = getInput1();
+
+  auto inputRank = ShapedType::kDynamic;
+  bool hasRankedInputs = false;
+  for (auto input : inputs) {
+    auto inputType = llvm::cast<ShapedType>(input.getType());
+    if (inputType.hasRank()) {
+      hasRankedInputs = true;
+      inputRank = inputType.getRank();
+      break;
+    }
+  }
+
+  if (hasRankedInputs) {
+    int64_t axis = getAxis();
+    if (axis < 0 || axis >= std::max((int64_t)1, inputRank)) {
+      return emitOpError() << "axis must be in range 0 to " << inputRank - 1;
+    }
+
+    for (auto input : inputs) {
+      auto inputType = llvm::cast<ShapedType>(input.getType());
+      if (!inputType.hasRank()) {
+        continue;
+      }
+      if (inputRank != inputType.getRank()) {
+        return emitOpError()
+               << "rank of input " << inputType
+               << " does not match other input rank(s) (" << inputRank << ")";
+      }
+    }
+  }
+  return success();
+}
+
 LogicalResult tosa::EqualOp::inferReturnTypeComponents(
     MLIRContext *context, ::std::optional<Location> location,
     ValueShapeRange operands, DictionaryAttr attributes,
@@ -590,6 +625,7 @@ LogicalResult tosa::PadOp::inferReturnTypeComponents(
     ValueShapeRange operands, DictionaryAttr attributes,
     OpaqueProperties properties, RegionRange regions,
     SmallVectorImpl<ShapedTypeComponents> &inferredReturnShapes) {
+  Type inputType = getElementTypeOrSelf(operands[0]);
   ShapeAdaptor inputShape = operands.getShape(0);
   ShapeAdaptor paddingShape = operands.getShape(1);
   SmallVector<int64_t> outputShape;
@@ -610,15 +646,17 @@ LogicalResult tosa::PadOp::inferReturnTypeComponents(
     }
 
     outputShape.resize(paddingShape.getDimSize(0), ShapedType::kDynamic);
-    inferredReturnShapes.push_back(ShapedTypeComponents(outputShape));
+    inferredReturnShapes.push_back(
+        ShapedTypeComponents(outputShape, inputType));
     return success();
   }
 
   DenseIntElementsAttr paddings;
   // If the paddings value is not a constant, all dimensions must be dynamic.
   if (!matchPattern(operands[1], m_Constant(&paddings))) {
     outputShape.resize(inputShape.getRank(), ShapedType::kDynamic);
-    inferredReturnShapes.push_back(ShapedTypeComponents(outputShape));
+    inferredReturnShapes.push_back(
+        ShapedTypeComponents(outputShape, inputType));
     return success();
   }
 
@@ -638,7 +676,39 @@ LogicalResult tosa::PadOp::inferReturnTypeComponents(
                           paddingValues[i * 2 + 1]);
   }
 
-  inferredReturnShapes.push_back(ShapedTypeComponents(outputShape));
+  inferredReturnShapes.push_back(ShapedTypeComponents(outputShape, inputType));
+  return success();
+}
+
+LogicalResult PadOp::verify() {
+  ShapedType inputType = llvm::cast<ShapedType>(getInput1().getType());
+  if (inputType.hasRank() && inputType.getRank() == 0) {
+    return emitOpError() << "input tensor rank must not be 0";
+  }
+
+  ShapedType paddingType = llvm::cast<ShapedType>(getPadding().getType());
+  if (paddingType.hasRank()) {
+    if (paddingType.getRank() != 2) {
+      return emitOpError() << "paddings must be a tensor of rank 2";
+    }
+    if (inputType.hasRank() && !paddingType.isDynamicDim(0) &&
+        inputType.getRank() != paddingType.getDimSize(0)) {
+      return emitOpError() << "paddings must be a tensor of shape ["
+                           << inputType.getRank() << ", 2]";
+    }
+    if (!paddingType.isDynamicDim(1) && paddingType.getDimSize(1) != 2) {
+      return emitOpError() << "paddings must be a tensor of shape ["
+                           << inputType.getRank() << ", 2]";
+    }
+
+    DenseIntElementsAttr paddings;
+    if (matchPattern(getPadding(), m_Constant(&paddings))) {
+      if (llvm::any_of(paddings,
                       [](auto val) { return val.getSExtValue() < 0; })) {
+        return emitOpError() << "number of pad elements must be positive";
+      }
+    }
+  }
   return success();
 }
 
@@ -1069,6 +1139,7 @@ REDUCE_SHAPE_INFER(tosa::ReduceProdOp)
 REDUCE_SHAPE_INFER(tosa::ReduceSumOp)
 #undef REDUCE_SHAPE_INFER
 COMPATIBLE_RETURN_TYPES(tosa::ConcatOp)
+COMPATIBLE_RETURN_TYPES(tosa::PadOp)
 #undef COMPATIBLE_RETURN_TYPES
 
 static LogicalResult NAryInferReturnTypes(
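The last hunk only registers tosa::PadOp with the pre-existing COMPATIBLE_RETURN_TYPES macro, which supplies the isCompatibleReturnTypes hook declared in the .td file. Roughly, such a hook accepts a declared result type when its element type matches the inferred one and the shapes are reconcilable, so a dynamic dimension may stand in for a static one. A sketch under those assumptions (paraphrased, not copied from the macro body):

// Sketch of an isCompatibleReturnTypes-style hook (illustrative; the actual
// COMPATIBLE_RETURN_TYPES macro in TosaOps.cpp may differ in detail).
#include "mlir/IR/TypeRange.h"
#include "mlir/IR/TypeUtilities.h"
#include "mlir/Support/LogicalResult.h"

static bool isCompatibleReturnTypesSketch(mlir::TypeRange l,
                                          mlir::TypeRange r) {
  if (l.size() != r.size() || l.size() != 1)
    return false;
  // Element types must match exactly, which is what @test_pad_type_mismatch
  // below relies on.
  if (mlir::getElementTypeOrSelf(l[0]) != mlir::getElementTypeOrSelf(r[0]))
    return false;
  // Shapes only need to be compatible: tensor<?x?x?xf32> is accepted for an
  // inferred tensor<15x23x5xf32>, but tensor<13x21x3xf32> is not.
  return mlir::succeeded(mlir::verifyCompatibleShape(l[0], r[0]));
}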

mlir/test/Conversion/TosaToTensor/tosa-to-tensor.mlir

Lines changed: 1 addition & 1 deletion
@@ -182,7 +182,7 @@ func.func @pad_dyn_input(%arg0 : tensor<?x2xf32>) -> (tensor<?x9xf32>) {
 }
 
 func.func @pad_dyn_padding(%arg0 : tensor<1x2xf32>) -> (tensor<?x9xf32>) {
-  %0 = arith.constant dense<[[-1, 2], [3, 4]]> : tensor<2x2xi32>
+  %0 = arith.constant dense<[[1, 2], [3, 4]]> : tensor<2x2xi32>
   // TODO: Output contains multiple "arith.constant 1 : index".
   // CHECK-DAG: [[INDEX1:%.+]] = arith.constant 1 : index
   // CHECK-DAG: [[INDEX2:%.+]] = arith.constant 2 : index
mlir/test/Dialect/Tosa/invalid.mlir

Lines changed: 117 additions & 3 deletions
@@ -56,6 +56,48 @@ func.func @test_concat_element_type_mismatch(%arg0 : tensor<1x2xf32>, %arg1 : te
 
 // -----
 
+func.func @test_concat_output_shape_mismatch(%arg0 : tensor<2x1xf32>, %arg1 : tensor<2x2xf32>) -> tensor<2x2xf32> {
+  // expected-error@+2 {{failed to infer returned types}}
+  // expected-error@+1 {{inferred type(s) 'tensor<2x3xf32>' are incompatible with return type(s) of operation 'tensor<2x2xf32>}}
+  %0 = "tosa.concat"(%arg0, %arg1) {axis = 1 : i64} : (tensor<2x1xf32>, tensor<2x2xf32>) -> tensor<2x2xf32>
+  return %0 : tensor<2x2xf32>
+}
+
+// -----
+
+func.func @test_concat_output_rank_mismatch(%arg0 : tensor<2x1xf32>, %arg1 : tensor<2x2xf32>) -> tensor<?x?x?xf32> {
+  // expected-error@+2 {{failed to infer returned types}}
+  // expected-error@+1 {{inferred type(s) 'tensor<2x3xf32>' are incompatible with return type(s) of operation 'tensor<?x?x?xf32>}}
+  %0 = "tosa.concat"(%arg0, %arg1) {axis = 1 : i64} : (tensor<2x1xf32>, tensor<2x2xf32>) -> tensor<?x?x?xf32>
+  return %0 : tensor<?x?x?xf32>
+}
+
+// -----
+
+func.func @test_concat_input_rank_mismatch(%arg0 : tensor<1x2xf32>, %arg1 : tensor<2x2x2xf32>) -> tensor<?x?xf32> {
+  // expected-error@+1 {{'tosa.concat' op rank of input 'tensor<2x2x2xf32>' does not match other input rank(s) (2)}}
+  %0 = "tosa.concat"(%arg0, %arg1) {axis = 0 : i64} : (tensor<1x2xf32>, tensor<2x2x2xf32>) -> tensor<?x?xf32>
+  return %0 : tensor<?x?xf32>
+}
+
+// -----
+
+func.func @test_concat_axis_out_of_range(%arg0 : tensor<1x2xf32>, %arg1 : tensor<2x2xf32>) -> tensor<?x?xf32> {
+  // expected-error@+1 {{'tosa.concat' op axis must be in range 0 to 1}}
+  %0 = "tosa.concat"(%arg0, %arg1) {axis = -1 : i64} : (tensor<1x2xf32>, tensor<2x2xf32>) -> tensor<?x?xf32>
+  return %0 : tensor<?x?xf32>
+}
+
+// -----
+
+func.func @test_concat_axis_out_of_range(%arg0 : tensor<10x11x12xf32>, %arg1 : tensor<10x11x21xf32>) -> tensor<?x?x?xf32> {
+  // expected-error@+1 {{'tosa.concat' op axis must be in range 0 to 2}}
+  %0 = "tosa.concat"(%arg0, %arg1) {axis = 3 : i64} : (tensor<10x11x12xf32>, tensor<10x11x21xf32>) -> tensor<?x?x?xf32>
+  return %0 : tensor<?x?x?xf32>
+}
+
+// -----
+
 func.func @test_pad_non_const(%arg0: tensor<13x21x3xf32>, %arg1: tensor<3x2xi32>) -> tensor<13x21x3xf32> {
   // expected-error@+1 {{'tosa.pad' op padding of pad is not constant}}
   %0 = "tosa.pad"(%arg0, %arg1) : (tensor<13x21x3xf32>, tensor<3x2xi32>) -> tensor<13x21x3xf32>
@@ -64,11 +106,83 @@ func.func @test_pad_non_const(%arg0: tensor<13x21x3xf32>, %arg1: tensor<3x2xi32>
 
 // -----
 
-func.func @test_pad_non_const(%arg0: tensor<13x21x3xi8>, %arg1: tensor<i8>) -> tensor<13x21x3xi8> {
+func.func @test_pad_non_const(%arg0: tensor<13x21x3xi8>, %arg1: tensor<i8>) -> tensor<?x?x?xi8> {
   %0 = "tosa.const"() {value = dense<[[0, 0], [0, 1], [0, 1]]> : tensor<3x2xi32>} : () -> tensor<3x2xi32>
   // expected-error@+1 {{'tosa.pad' op pad_const of pad is not constant}}
-  %1 = "tosa.pad"(%arg0, %0, %arg1) : (tensor<13x21x3xi8>, tensor<3x2xi32>, tensor<i8>) -> tensor<13x21x3xi8>
-  return %1 : tensor<13x21x3xi8>
+  %1 = "tosa.pad"(%arg0, %0, %arg1) : (tensor<13x21x3xi8>, tensor<3x2xi32>, tensor<i8>) -> tensor<?x?x?xi8>
+  return %1 : tensor<?x?x?xi8>
+}
+
+// -----
+
+func.func @test_pad_output_shape_mismatch(%arg0: tensor<13x21x3xf32>) -> tensor<13x21x3xf32> {
+  %0 = "tosa.const"() {value = dense<[[1, 1], [1, 1], [1, 1]]> : tensor<3x2xi32>} : () -> tensor<3x2xi32>
+  // expected-error@+2 {{'tosa.pad' op failed to infer returned types}}
+  // expected-error@+1 {{'tosa.pad' op inferred type(s) 'tensor<15x23x5xf32>' are incompatible with return type(s) of operation 'tensor<13x21x3xf32>}}
+  %1 = "tosa.pad"(%arg0, %0) : (tensor<13x21x3xf32>, tensor<3x2xi32>) -> tensor<13x21x3xf32>
+  return %1 : tensor<13x21x3xf32>
+}
+
+// -----
+
+func.func @test_pad_type_mismatch(%arg0: tensor<13x21x3xf32>) -> tensor<15x23x5xi32> {
+  %0 = "tosa.const"() {value = dense<[[1, 1], [1, 1], [1, 1]]> : tensor<3x2xi32>} : () -> tensor<3x2xi32>
+  // expected-error@+2 {{'tosa.pad' op failed to infer returned types}}
+  // expected-error@+1 {{'tosa.pad' op inferred type(s) 'tensor<15x23x5xf32>' are incompatible with return type(s) of operation 'tensor<15x23x5xi32>}}
+  %1 = "tosa.pad"(%arg0, %0) : (tensor<13x21x3xf32>, tensor<3x2xi32>) -> tensor<15x23x5xi32>
+  return %1 : tensor<15x23x5xi32>
+}
+
+// -----
+
+func.func @test_pad_incorret_padding_rank(%arg0: tensor<13x21xf32>) -> tensor<13x21xf32> {
+  %0 = "tosa.const"() {value = dense<[0, 1]> : tensor<2xi32>} : () -> tensor<2xi32>
+  // expected-error@+1 {{'tosa.pad' op paddings must be a tensor of rank 2}}
+  %1 = "tosa.pad"(%arg0, %0) : (tensor<13x21xf32>, tensor<2xi32>) -> tensor<13x21xf32>
+  return %1 : tensor<13x21xf32>
+}
+
+// -----
+
+func.func @test_pad_incorret_padding_shape(%arg0: tensor<13x21xf32>) -> tensor<13x21xf32> {
+  %0 = "tosa.const"() {value = dense<[[0, 0], [0, 1], [0, 1], [1, 1]]> : tensor<4x2xi32>} : () -> tensor<4x2xi32>
+  // expected-error@+1 {{'tosa.pad' op paddings must be a tensor of shape [2, 2]}}
+  %1 = "tosa.pad"(%arg0, %0) : (tensor<13x21xf32>, tensor<4x2xi32>) -> tensor<13x21xf32>
+  return %1 : tensor<13x21xf32>
+}
+
+// -----
+
+func.func @test_pad_incorret_padding_shape(%arg0: tensor<13x21xf32>) -> tensor<13x21xf32> {
+  %0 = "tosa.const"() {value = dense<[[0, 0, 0, 1], [0, 1, 1, 1]]> : tensor<2x4xi32>} : () -> tensor<2x4xi32>
+  // expected-error@+1 {{'tosa.pad' op paddings must be a tensor of shape [2, 2]}}
+  %1 = "tosa.pad"(%arg0, %0) : (tensor<13x21xf32>, tensor<2x4xi32>) -> tensor<13x21xf32>
+  return %1 : tensor<13x21xf32>
+}
+
+// -----
+
+func.func @test_pad_negative_padding(%arg0: tensor<13x21xf32>) -> tensor<?x?xf32> {
+  %0 = "tosa.const"() {value = dense<[[0, 0], [0, -1]]> : tensor<2x2xi32>} : () -> tensor<2x2xi32>
+  // expected-error@+1 {{'tosa.pad' op number of pad elements must be positive}}
+  %1 = "tosa.pad"(%arg0, %0) : (tensor<13x21xf32>, tensor<2x2xi32>) -> tensor<?x?xf32>
+  return %1 : tensor<?x?xf32>
+}
+
+// -----
+
+func.func @test_pad_incorrect_input(%arg0: f32, %arg1: i32) -> f32 {
+  // expected-error@+1 {{'tosa.pad' op operand #0 must be ranked tensor of number values, but got 'f32'}}
+  %1 = "tosa.pad"(%arg0, %arg1) : (f32, i32) -> f32
+  return %1 : f32
+}
+
+// -----
+
+func.func @test_pad_zero_rank_input(%arg0: tensor<f32>, %arg1: tensor<i32>) -> tensor<f32> {
+  // expected-error@+1 {{'tosa.pad' op input tensor rank must not be 0}}
+  %1 = "tosa.pad"(%arg0, %arg1) : (tensor<f32>, tensor<i32>) -> tensor<f32>
+  return %1 : tensor<f32>
 }
 
 // -----
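The expected-error checks above exercise the new diagnostics through the usual mlir-opt -verify-diagnostics flow for lit tests. As a rough sketch of how the same verification fires programmatically, assuming an MLIR build with the TOSA and func dialects available; verifiesSketch is an illustrative helper, not part of the patch:

#include "mlir/Dialect/Func/IR/FuncOps.h"
#include "mlir/Dialect/Tosa/IR/TosaOps.h"
#include "mlir/IR/BuiltinOps.h"
#include "mlir/IR/MLIRContext.h"
#include "mlir/IR/OwningOpRef.h"
#include "mlir/Parser/Parser.h"

// Parses a module and reports whether it verified; parseSourceString runs the
// registered verifiers by default, so IR like @test_pad_negative_padding above
// comes back null here, with the "number of pad elements must be positive"
// diagnostic emitted to the context's diagnostic handler.
static bool verifiesSketch(llvm::StringRef ir) {
  mlir::MLIRContext ctx;
  ctx.loadDialect<mlir::tosa::TosaDialect, mlir::func::FuncDialect>();
  mlir::OwningOpRef<mlir::ModuleOp> module =
      mlir::parseSourceString<mlir::ModuleOp>(ir, mlir::ParserConfig(&ctx));
  return static_cast<bool>(module);
}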
