
Commit 31b72b0

[mlir][sparse] Make isBlockSparsity more robust (#75113)

1. A single dimension can either be blocked (expressed as a floordiv and mod pair) or non-blocked; mixing the two forms for the same dimension is invalid.
2. The block size must be a positive (non-zero) value.
1 parent 0661309 commit 31b72b0

2 files changed (+57, -2 lines)
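
For context, a well-formed block-sparse dimToLvl map pairs each blocked dimension's floordiv with a mod over the same positive block size. The following is a minimal C++ sketch (not part of this commit) that builds such a map with the standard MLIR affine-expression API; the helper name buildBsrDimToLvl is purely illustrative.

```cpp
#include "mlir/Dialect/SparseTensor/IR/SparseTensor.h"
#include "mlir/IR/AffineExpr.h"
#include "mlir/IR/AffineMap.h"
#include "mlir/IR/MLIRContext.h"

using namespace mlir;

// (i, j) -> (i floordiv 2, j floordiv 3, i mod 2, j mod 3): a 2x3 BSR layout
// in which every blocked dimension has a matching floordiv/mod pair with the
// same positive block size.
static AffineMap buildBsrDimToLvl(MLIRContext *ctx) {
  AffineExpr i = getAffineDimExpr(0, ctx);
  AffineExpr j = getAffineDimExpr(1, ctx);
  return AffineMap::get(/*dimCount=*/2, /*symbolCount=*/0,
                        {i.floorDiv(2), j.floorDiv(3), i % 2, j % 3}, ctx);
}
// sparse_tensor::isBlockSparsity(buildBsrDimToLvl(&ctx)) is expected to
// return true for this map.
```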

mlir/lib/Dialect/SparseTensor/IR/SparseTensorDialect.cpp

Lines changed: 12 additions & 2 deletions
```diff
@@ -856,12 +856,13 @@ bool mlir::sparse_tensor::isBlockSparsity(AffineMap dimToLvl) {
   if (!dimToLvl)
     return false;
   std::map<unsigned, int64_t> coeffientMap;
+  bool hasBlock = false;
   for (auto result : dimToLvl.getResults()) {
     if (auto binOp = dyn_cast<AffineBinaryOpExpr>(result)) {
       // Check for "dim op const".
       auto dimOp = dyn_cast<AffineDimExpr>(binOp.getLHS());
       auto conOp = dyn_cast<AffineConstantExpr>(binOp.getRHS());
-      if (!dimOp || !conOp)
+      if (!dimOp || !conOp || conOp.getValue() <= 0)
         return false;
       // Inspect "dim / const" or "dim % const".
       auto pos = dimOp.getPosition();
@@ -878,12 +879,21 @@ bool mlir::sparse_tensor::isBlockSparsity(AffineMap dimToLvl) {
         // Expect mod to have the same coefficient as floordiv.
         if (conOp.getValue() != coeffientMap[pos])
           return false;
+        hasBlock = true;
       } else {
         return false;
       }
+    } else if (auto dimOp = dyn_cast<AffineDimExpr>(result)) {
+      auto pos = dimOp.getPosition();
+      // Expect dim to be unset.
+      if (coeffientMap.find(pos) != coeffientMap.end())
+        return false;
+      coeffientMap[pos] = 0;
+    } else {
+      return false;
     }
   }
-  return !coeffientMap.empty();
+  return hasBlock;
 }
 
 bool mlir::sparse_tensor::hasAnyNonIdentityOperandsOrResults(Operation *op) {
```
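
To make the new rules concrete, here is a hedged, unit-test-style sketch (not taken from the commit) that constructs the kinds of maps the tightened check rejects. It assumes only the public AffineExpr/AffineMap builders and the isBlockSparsity entry point shown in the diff above.

```cpp
#include "mlir/Dialect/SparseTensor/IR/SparseTensor.h"
#include "mlir/IR/AffineExpr.h"
#include "mlir/IR/AffineMap.h"
#include "mlir/IR/MLIRContext.h"
#include <cassert>

using namespace mlir;

int main() {
  MLIRContext ctx;
  AffineExpr i = getAffineDimExpr(0, &ctx);
  AffineExpr j = getAffineDimExpr(1, &ctx);

  // Rejected (rule 1): dimension i appears both blocked (i floordiv 2) and
  // non-blocked (plain i), so the plain use hits the new AffineDimExpr branch
  // after a coefficient was already recorded for i.
  AffineMap mixed = AffineMap::get(
      /*dimCount=*/2, /*symbolCount=*/0,
      {i.floorDiv(2), j.floorDiv(3), i, j % 3}, &ctx);
  assert(!sparse_tensor::isBlockSparsity(mixed));

  // Rejected (rule 2): a non-positive block size now fails the
  // `conOp.getValue() <= 0` guard.
  AffineExpr minus3 = getAffineConstantExpr(-3, &ctx);
  AffineMap negative = AffineMap::get(
      /*dimCount=*/2, /*symbolCount=*/0,
      {i.floorDiv(minus3), j.floorDiv(minus3), i % 3, j % 3}, &ctx);
  assert(!sparse_tensor::isBlockSparsity(negative));
  return 0;
}
```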

mlir/test/Dialect/SparseTensor/invalid_encoding.mlir

Lines changed: 45 additions & 0 deletions
```diff
@@ -254,6 +254,51 @@ func.func private @wrong_order_lvl_decl(%arg0: tensor<?x?xf64, #WrongOrderLvlDec
 
 // -----
 
+// expected-error@+1 {{failed to infer lvlToDim from dimToLvl}}
+#BSR = #sparse_tensor.encoding<{
+  map = ( i, j ) ->
+  ( i floordiv 2 : dense,
+    j floordiv 3 : compressed,
+    i : dense,
+    j mod 3 : dense
+  )
+}>
+func.func private @BSR(%arg0: tensor<?x?xf64, #BSR>) {
+  return
+}
+
+// -----
+
+// expected-error@+1 {{failed to infer lvlToDim from dimToLvl}}
+#BSR = #sparse_tensor.encoding<{
+  map = ( i, j ) ->
+  ( i : dense,
+    j floordiv 3 : compressed,
+    i floordiv 3 : dense,
+    j mod 3 : dense
+  )
+}>
+func.func private @BSR(%arg0: tensor<?x?xf64, #BSR>) {
+  return
+}
+
+// -----
+
+// expected-error@+1 {{failed to infer lvlToDim from dimToLvl}}
+#BSR = #sparse_tensor.encoding<{
+  map = ( i, j ) ->
+  ( i floordiv -3 : dense,
+    j floordiv -3 : compressed,
+    i mod 3 : dense,
+    j mod 3 : dense
+  )
+}>
+func.func private @BSR(%arg0: tensor<?x?xf64, #BSR>) {
+  return
+}
+
+// -----
+
 // expected-error@+1 {{expected lvlToDim to be an inverse of dimToLvl}}
 #BSR_explicit = #sparse_tensor.encoding<{
   map =
```
