@@ -153,10 +153,18 @@ class TileAllocator {
     return failure();
   }
 
+  /// Acquires a specific tile ID. Asserts the tile is initially free.
+  void acquireTileId(ArmSMETileType tileType, unsigned tileId) {
+    TileMask tileMask = getMasks(tileType)[tileId];
+    assert((tilesInUse & tileMask) == TileMask::kNone &&
+           "cannot acquire allocated tile!");
+    tilesInUse |= tileMask;
+  }
+
   /// Releases a previously allocated tile ID.
   void releaseTileId(ArmSMETileType tileType, unsigned tileId) {
     TileMask tileMask = getMasks(tileType)[tileId];
-    assert((tilesInUse & tileMask) != TileMask::kNone &&
+    assert((tilesInUse & tileMask) == tileMask &&
            "cannot release unallocated tile!");
     tilesInUse ^= tileMask;
   }
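Aside (not part of the diff): the release assert is presumably tightened because a tile's mask can span several bits, since SME tiles of different element widths alias one another; `!= TileMask::kNone` would also pass when only an aliasing tile is in use, whereas `== tileMask` requires the whole tile to be allocated. A minimal standalone sketch of this acquire/release pattern, with made-up mask values rather than the real ArmSME ones:

// Toy illustration only; the mask values and widths here are invented.
#include <cassert>
#include <cstdint>

enum class TileMask : uint8_t {
  kNone = 0,
  kNarrow0 = 0b01, // hypothetical narrow tile
  kNarrow1 = 0b10, // hypothetical narrow tile
  kWide0 = 0b11,   // hypothetical wide tile aliasing both narrow tiles
};

constexpr TileMask operator&(TileMask a, TileMask b) {
  return TileMask(uint8_t(a) & uint8_t(b));
}
constexpr TileMask operator|(TileMask a, TileMask b) {
  return TileMask(uint8_t(a) | uint8_t(b));
}
constexpr TileMask operator^(TileMask a, TileMask b) {
  return TileMask(uint8_t(a) ^ uint8_t(b));
}

struct ToyTileAllocator {
  TileMask tilesInUse = TileMask::kNone;

  void acquire(TileMask mask) {
    // Every bit of the tile's mask must currently be free.
    assert((tilesInUse & mask) == TileMask::kNone && "tile already in use");
    tilesInUse = tilesInUse | mask;
  }

  void release(TileMask mask) {
    // `== mask` (rather than `!= kNone`) rejects releasing a tile when only
    // an aliasing tile is held, e.g. releasing kWide0 while holding kNarrow0.
    assert((tilesInUse & mask) == mask && "tile not fully allocated");
    tilesInUse = tilesInUse ^ mask;
  }
};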
@@ -289,6 +297,11 @@ struct LiveRange {
         .valid();
   }
 
+  /// Returns true if this range is active at `point` in the program.
+  bool overlaps(uint64_t point) const {
+    return ranges->lookup(point) == kValidLiveRange;
+  }
+
   /// Unions this live range with `otherRange`, aborts if the ranges overlap.
   void unionWith(LiveRange const &otherRange) {
     for (auto it = otherRange.ranges->begin(); it != otherRange.ranges->end();
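Aside (not part of the diff): the new point-wise `overlaps(uint64_t point)` query, alongside the existing range-vs-range `overlaps` visible in the context above, is what lets the allocator distinguish a value that is live but currently inside a 'hole' from one that needs a tile right now. A rough standalone sketch of both queries over half-open intervals; the real `LiveRange` keeps its intervals in an interval map (hence `ranges->lookup(point)`), not a vector:

// Toy model of a live range as sorted, disjoint half-open intervals.
#include <cstdint>
#include <utility>
#include <vector>

struct ToyLiveRange {
  std::vector<std::pair<uint64_t, uint64_t>> intervals; // each [start, end)

  // Is the range active at a single program point?
  bool overlaps(uint64_t point) const {
    for (auto [start, end] : intervals)
      if (start <= point && point < end)
        return true;
    return false;
  }

  // Does the range overlap another range at any program point?
  bool overlaps(const ToyLiveRange &other) const {
    for (auto [start, end] : intervals)
      for (auto [otherStart, otherEnd] : other.intervals)
        if (start < otherEnd && otherStart < end)
          return true;
    return false;
  }
};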
@@ -488,76 +501,139 @@ coalesceTileLiveRanges(DenseMap<Value, LiveRange> &initialLiveRanges) {
   return std::move(coalescedLiveRanges);
 }
 
-/// Choose a live range to spill (via some heuristics). This picks either an
-/// active live range from `activeRanges` or the new live range `newRange`.
-LiveRange *chooseSpillUsingHeuristics(ArrayRef<LiveRange *> activeRanges,
-                                      LiveRange *newRange) {
+/// Choose a live range to spill (via some heuristics). This picks either a live
+/// range from `overlappingRanges`, or the new live range `newRange`.
+template <typename OverlappingRangesIterator>
+LiveRange *
+chooseSpillUsingHeuristics(OverlappingRangesIterator overlappingRanges,
+                           LiveRange *newRange) {
   // Heuristic: Spill trivially copyable operations (usually free).
-  auto isTrivialSpill = [&](LiveRange *allocatedRange) {
-    return isTileTypeGreaterOrEqual(allocatedRange->getTileType(),
+  auto isTrivialSpill = [&](LiveRange &allocatedRange) {
+    return isTileTypeGreaterOrEqual(allocatedRange.getTileType(),
                                     newRange->getTileType()) &&
-           allocatedRange->values.size() == 1 &&
+           allocatedRange.values.size() == 1 &&
            isTriviallyCloneableTileOp(
-               allocatedRange->values[0]
-                   .getDefiningOp<ArmSMETileOpInterface>());
+               allocatedRange.values[0].getDefiningOp<ArmSMETileOpInterface>());
   };
-  if (isTrivialSpill(newRange))
+  if (isTrivialSpill(*newRange))
     return newRange;
-  auto trivialSpill = llvm::find_if(activeRanges, isTrivialSpill);
-  if (trivialSpill != activeRanges.end())
-    return *trivialSpill;
+  auto trivialSpill = llvm::find_if(overlappingRanges, isTrivialSpill);
+  if (trivialSpill != overlappingRanges.end())
+    return &*trivialSpill;
 
   // Heuristic: Spill the range that ends last (with a compatible tile type).
-  auto isSmallerTileTypeOrEndsEarlier = [](LiveRange *a, LiveRange *b) {
-    return !isTileTypeGreaterOrEqual(a->getTileType(), b->getTileType()) ||
-           a->end() < b->end();
+  auto isSmallerTileTypeOrEndsEarlier = [](LiveRange &a, LiveRange &b) {
+    return !isTileTypeGreaterOrEqual(a.getTileType(), b.getTileType()) ||
+           a.end() < b.end();
   };
-  LiveRange *lastActiveLiveRange = *std::max_element(
-      activeRanges.begin(), activeRanges.end(), isSmallerTileTypeOrEndsEarlier);
-  if (!isSmallerTileTypeOrEndsEarlier(lastActiveLiveRange, newRange))
-    return lastActiveLiveRange;
+  LiveRange &latestEndingLiveRange =
+      *std::max_element(overlappingRanges.begin(), overlappingRanges.end(),
+                        isSmallerTileTypeOrEndsEarlier);
+  if (!isSmallerTileTypeOrEndsEarlier(latestEndingLiveRange, *newRange))
+    return &latestEndingLiveRange;
   return newRange;
 }
 
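Aside (not part of the diff): making `chooseSpillUsingHeuristics` a template over an iterator range (and switching its lambdas to take `LiveRange &`) lets the call site further down pass the active ranges and the overlapping inactive ranges as one lazily concatenated sequence of references, without copying them into a temporary container. The `llvm::concat` / `llvm::make_pointee_range` idiom used there looks roughly like this in isolation (toy element type, hypothetical function name):

// Toy demonstration of chaining two pointer containers into one range of
// references, mirroring the spill call site below.
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallVector.h"

static int sumPointees(llvm::SmallVectorImpl<int *> &a,
                       llvm::SmallVectorImpl<int *> &b) {
  int total = 0;
  // `make_pointee_range` dereferences each pointer, so the loop sees `int &`;
  // `concat` chains the two ranges without materialising a new container.
  for (int &value : llvm::concat<int>(llvm::make_pointee_range(a),
                                      llvm::make_pointee_range(b)))
    total += value;
  return total;
}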
 /// Greedily allocate tile IDs to live ranges. Spill using simple heuristics.
-/// Note: This does not attempt to fill holes in active live ranges.
 void allocateTilesToLiveRanges(
     ArrayRef<LiveRange *> liveRangesSortedByStartPoint) {
   TileAllocator tileAllocator;
+  // `activeRanges` = Live ranges that need to be in a tile at the
+  // `currentPoint` in the program.
   SetVector<LiveRange *> activeRanges;
+  // `inactiveRanges` = Live ranges that _do not_ need to be in a tile
+  // at the `currentPoint` in the program but could become active again later.
+  // An inactive section of a live range can be seen as a 'hole' in the live
+  // range, where it is possible to reuse the live range's tile ID _before_ it
+  // has ended. By identifying 'holes', the allocator can reuse tiles more
+  // often, which helps avoid costly tile spills.
+  SetVector<LiveRange *> inactiveRanges;
   for (LiveRange *nextRange : liveRangesSortedByStartPoint) {
-    // Release tile IDs from live ranges that have ended.
+    auto currentPoint = nextRange->start();
+    // 1. Update the `activeRanges` at `currentPoint`.
     activeRanges.remove_if([&](LiveRange *activeRange) {
-      if (activeRange->end() <= nextRange->start()) {
+      // Check for live ranges that have expired.
+      if (activeRange->end() <= currentPoint) {
         tileAllocator.releaseTileId(activeRange->getTileType(),
                                     *activeRange->tileId);
         return true;
       }
+      // Check for live ranges that have become inactive.
+      if (!activeRange->overlaps(currentPoint)) {
+        tileAllocator.releaseTileId(activeRange->getTileType(),
+                                    *activeRange->tileId);
+        inactiveRanges.insert(activeRange);
+        return true;
+      }
       return false;
     });
+    // 2. Update the `inactiveRanges` at `currentPoint`.
+    inactiveRanges.remove_if([&](LiveRange *inactiveRange) {
+      // Check for live ranges that have expired.
+      if (inactiveRange->end() <= currentPoint) {
+        return true;
+      }
+      // Check for live ranges that have become active.
+      if (inactiveRange->overlaps(currentPoint)) {
+        tileAllocator.acquireTileId(inactiveRange->getTileType(),
+                                    *inactiveRange->tileId);
+        activeRanges.insert(inactiveRange);
+        return true;
+      }
+      return false;
+    });
+
+    // 3. Collect inactive live ranges that overlap with the new live range.
+    // Note: The overlap checks in steps 1 and 2 only look at the `currentPoint`
+    // whereas this checks if there is an overlap at any future point too.
+    SmallVector<LiveRange *> overlappingInactiveRanges;
+    for (LiveRange *inactiveRange : inactiveRanges) {
+      if (inactiveRange->overlaps(*nextRange)) {
+        // We need to reserve the tile IDs of overlapping inactive ranges to
+        // prevent two (overlapping) live ranges from getting the same tile ID.
+        tileAllocator.acquireTileId(inactiveRange->getTileType(),
+                                    *inactiveRange->tileId);
+        overlappingInactiveRanges.push_back(inactiveRange);
+      }
+    }
 
-    // Allocate a tile ID to `nextRange`.
+    // 4. Allocate a tile ID to `nextRange`.
     auto rangeTileType = nextRange->getTileType();
     auto tileId = tileAllocator.allocateTileId(rangeTileType);
     if (succeeded(tileId)) {
       nextRange->tileId = *tileId;
     } else {
+      // Create an iterator over all overlapping live ranges.
+      auto allOverlappingRanges = llvm::concat<LiveRange>(
+          llvm::make_pointee_range(activeRanges.getArrayRef()),
+          llvm::make_pointee_range(overlappingInactiveRanges));
+      // Choose an overlapping live range to spill.
       LiveRange *rangeToSpill =
-          chooseSpillUsingHeuristics(activeRanges.getArrayRef(), nextRange);
+          chooseSpillUsingHeuristics(allOverlappingRanges, nextRange);
       if (rangeToSpill != nextRange) {
-        // Spill an active live range (so release its tile ID first).
+        // Spill an (in)active live range (so release its tile ID first).
        tileAllocator.releaseTileId(rangeToSpill->getTileType(),
                                     *rangeToSpill->tileId);
-        activeRanges.remove(rangeToSpill);
         // This will always succeed after a spill (of an active live range).
         nextRange->tileId = *tileAllocator.allocateTileId(rangeTileType);
+        // Remove the live range from the active/inactive sets.
+        if (!activeRanges.remove(rangeToSpill)) {
+          bool removed = inactiveRanges.remove(rangeToSpill);
+          assert(removed && "expected a range to be removed!");
+        }
       }
       rangeToSpill->tileId = tileAllocator.allocateInMemoryTileId();
     }
 
-    // Insert the live range into the active ranges.
+    // 5. Insert the live range into the active ranges.
     if (nextRange->tileId < kInMemoryTileIdBase)
       activeRanges.insert(nextRange);
+
+    // 6. Release tiles reserved for inactive live ranges (in step 3).
+    for (LiveRange *range : overlappingInactiveRanges) {
+      if (*range->tileId < kInMemoryTileIdBase)
+        tileAllocator.releaseTileId(range->getTileType(), *range->tileId);
+    }
   }
 }
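Aside (not part of the diff): a tiny worked example of the 'hole' filling that steps 1 to 3 implement, with made-up program points:

// Standalone toy trace (invented program points, half-open intervals).
#include <cassert>
#include <cstdint>

struct Interval {
  uint64_t start, end; // [start, end)
  bool contains(uint64_t point) const { return start <= point && point < end; }
};

int main() {
  // %a is live over [0, 2) and [6, 8); the gap [2, 6) is a 'hole' where %a is
  // inactive and its tile ID may be reused.
  Interval aFirst{0, 2}, aSecond{6, 8};
  // %b is live over [3, 5), which sits entirely inside %a's hole.
  Interval b{3, 5};

  // At %b's start point, %a is inactive (step 1 moves it to `inactiveRanges`
  // and releases its tile ID)...
  assert(!aFirst.contains(b.start) && !aSecond.contains(b.start));
  // ...and %b ends before %a becomes active again, so the two ranges never
  // overlap and can share one tile ID with no spill.
  assert(b.end <= aSecond.start);
  return 0;
}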