|
21 | 21 | #include "swift/SILOptimizer/Utils/CanonicalizeInstruction.h"
|
22 | 22 | #include "swift/SIL/DebugUtils.h"
|
23 | 23 | #include "swift/SIL/InstructionUtils.h"
|
| 24 | +#include "swift/SIL/MemAccessUtils.h" |
24 | 25 | #include "swift/SIL/Projection.h"
|
25 | 26 | #include "swift/SIL/SILBuilder.h"
|
26 | 27 | #include "swift/SIL/SILFunction.h"
|
@@ -137,6 +138,77 @@ static void replaceUsesOfExtract(SingleValueInstruction *extract,
|
137 | 138 | extract->replaceAllUsesWith(loadedVal);
|
138 | 139 | }
|
139 | 140 |
|
| 141 | +// If \p loadInst has any debug uses, then move it into a separate unsafe access |
| 142 | +// scope. This hides it from the exclusivity checker. |
| 143 | +// |
| 144 | +// If \p loadInst was successfully hidden, then this returns the next |
| 145 | +// instruction following \p loadInst and following any newly inserted |
| 146 | +// instructions. Otherwise this returns nullptr. Returning nullptr is a signal |
| 147 | +// to delete \p loadInst. |
| 148 | +// |
| 149 | +// Before: |
| 150 | +// |
| 151 | +// %a = begin_access %0 [read] [unknown] |
| 152 | +// %proj = some_projections %a |
| 153 | +// %whole = load %proj // <-- loadInst |
| 154 | +// %field = struct_element_addr %proj, #field |
| 155 | +// %part = load %field |
| 156 | +// |
| 157 | +// After: |
| 158 | +// |
| 159 | +// %a = begin_access %0 [read] [unknown] |
| 160 | +// %proj = some_projections %a |
| 161 | +// %a2 = begin_access %0 [read] [unsafe] // NEW |
| 162 | +// %proj2 = some_projections %a // CLONED |
| 163 | +// %whole = load %proj2 // <-- loadInst |
| 164 | +// end_access %a2 // NEW |
| 165 | +// %field = struct_element_addr %proj, #field |
| 166 | +// %part = load %field |
| 167 | +// |
| 168 | +static SILInstruction * |
| 169 | +moveLoadToUnsafeAccessScope(LoadInst *loadInst, |
| 170 | + CanonicalizeInstruction &pass) { |
| 171 | + // Only bother hiding the load if a debug instruction uses it; otherwise |
| 172 | + // the caller may simply delete it. |
| 171 | + if (llvm::none_of(loadInst->getUses(), [](Operand *use) { |
| 172 | + return use->getUser()->isDebugInstruction(); |
| 173 | + })) { |
| 174 | + return nullptr; |
| 175 | + } |
| 176 | + SILValue accessScope = getAccessScope(loadInst->getOperand()); |
| 177 | + auto *access = dyn_cast<BeginAccessInst>(accessScope); |
| 178 | + // Already inside an unsafe scope; nothing to hide from the checker. |
| 178 | + if (access && access->getEnforcement() == SILAccessEnforcement::Unsafe) |
| 179 | + return nullptr; |
| 180 | + |
| 181 | + auto checkBaseAddress = [=](SILValue addr) { |
| 182 | + if (addr != accessScope) |
| 183 | + return SILValue(); |
| 184 | + |
| 185 | + // The operand of the original access becomes the base of the new unsafe |
| 185 | + // scope, so the clone bypasses the enforced [unknown] access. |
| 186 | + if (access) |
| 187 | + return access->getOperand(); |
| 188 | + |
| 189 | + return accessScope; |
| 190 | + }; |
| 191 | + |
| 192 | + if (!canCloneUseDefChain(loadInst->getOperand(), checkBaseAddress)) |
| 193 | + return nullptr; |
| 194 | + |
| 195 | + SILValue newBase = |
| 196 | + cloneUseDefChain(loadInst->getOperand(), loadInst, checkBaseAddress); |
| 197 | + |
| 198 | + auto *beginUnsafe = SILBuilderWithScope(loadInst).createBeginAccess( |
| 199 | + loadInst->getLoc(), newBase, SILAccessKind::Read, |
| 200 | + SILAccessEnforcement::Unsafe, /*noNestedConflict=*/true, /*fromBuiltin=*/false); |
| 201 | + loadInst->setOperand(beginUnsafe); |
| 202 | + auto nextInst = loadInst->getNextInstruction(); |
| 203 | + auto *endUnsafe = SILBuilderWithScope(nextInst).createEndAccess( |
| 204 | + loadInst->getLoc(), beginUnsafe, /*aborted=*/false); |
| 205 | + |
| 206 | + pass.notifyNewInstruction(beginUnsafe); |
| 207 | + pass.notifyNewInstruction(endUnsafe); |
| 208 | + |
| 209 | + return nextInst; |
| 210 | +} |
| 211 | + |
140 | 212 | // Given a load with multiple struct_extracts/tuple_extracts and no other uses,
|
141 | 213 | // canonicalize the load into several (struct_element_addr (load)) pairs.
|
142 | 214 | //
|
@@ -301,16 +373,9 @@ splitAggregateLoad(LoadOperation loadInst, CanonicalizeInstruction &pass) {
|
301 | 373 | }
|
302 | 374 | pass.notifyNewInstruction(**lastNewLoad);
|
303 | 375 |
|
304 |
| - // FIXME: This drops debug info at -Onone load-splitting is required at |
305 |
| - // -Onone for exclusivity diagnostics. Fix this by |
306 |
| - // |
307 |
| - // 1. At -Onone, preserve the original load when pass.preserveDebugInfo is |
308 |
| - // true, but moving it out of its current access scope and into an "unknown" |
309 |
| - // access scope, which won't be enforced as an exclusivity violation. |
310 |
| - // |
311 |
| - // 2. At -O, create "debug fragments" recover as much debug info as possible |
312 |
| - // by creating debug_value fragments for each new partial load. Currently |
313 |
| - // disabled because of LLVM back-end crashes. |
| 376 | + // FIXME: At -O, create "debug fragments" to recover as much debug info as |
| 377 | + // possible by creating debug_value fragments for each new partial |
| 378 | + // load. Currently disabled because it caused an LLVM back-end crash. |
314 | 379 | if (!pass.preserveDebugInfo && EnableLoadSplittingDebugInfo) {
|
315 | 380 | createDebugFragments(*loadInst, proj, lastNewLoad->getLoadInst());
|
316 | 381 | }
|
@@ -340,13 +405,23 @@ splitAggregateLoad(LoadOperation loadInst, CanonicalizeInstruction &pass) {
|
340 | 405 | for (auto *borrow : borrows)
|
341 | 406 | nextII = killInstAndIncidentalUses(borrow, nextII, pass);
|
342 | 407 |
|
| 408 | + // When pass.preserveDebugInfo is true, keep the original load so that debug |
| 409 | + // info refers to the loaded value, rather than a memory location which may |
| 410 | + // not be reused. Move the wide load out of its current access scope and into |
| 411 | + // an "unknown" access scope, which won't be enforced as an exclusivity |
| 412 | + // violation. |
| 413 | + if (pass.preserveDebugInfo) { |
| 414 | + if (auto *regularLoad = dyn_cast<LoadInst>(loadInst.getLoadInst())) { |
| 415 | + if (auto *nextInst = moveLoadToUnsafeAccessScope(regularLoad, pass)) |
| 416 | + return nextInst->getIterator(); |
| 417 | + } |
| 418 | + } |
343 | 419 | // Erase the old load.
|
344 | 420 | for (auto *destroy : lifetimeEndingInsts)
|
345 | 421 | nextII = killInstruction(destroy, nextII, pass);
|
346 | 422 |
|
347 | 423 | // FIXME: remove this temporary hack to advance the iterator beyond
|
348 |
| - // debug_value. A soon-to-be merged commit migrates CanonicalizeInstruction to |
349 |
| - // use InstructionDeleter. |
| 424 | + // debug_value. |
350 | 425 | while (nextII != loadInst->getParent()->end()
|
351 | 426 | && nextII->isDebugInstruction()) {
|
352 | 427 | ++nextII;
|
|
0 commit comments