Skip to content

Commit 07f383f

Browse files
author
Jakub Staszak
committed
Remove trailing spaces.
llvm-svn: 189173
1 parent 45025c0 commit 07f383f

File tree

1 file changed

+44
-45
lines changed

1 file changed

+44
-45
lines changed

llvm/lib/Analysis/BasicAliasAnalysis.cpp

Lines changed: 44 additions & 45 deletions
Original file line numberDiff line numberDiff line change
@@ -122,15 +122,15 @@ static bool isObjectSmallerThan(const Value *V, uint64_t Size,
122122
// question (in this case rewind to p), or
123123
// - just give up. It is up to caller to make sure the pointer is pointing
124124
// to the base address of the object.
125-
//
125+
//
126126
// We go for 2nd option for simplicity.
127127
if (!isIdentifiedObject(V))
128128
return false;
129129

130130
// This function needs to use the aligned object size because we allow
131131
// reads a bit past the end given sufficient alignment.
132132
uint64_t ObjectSize = getObjectSize(V, TD, TLI, /*RoundToAlign*/true);
133-
133+
134134
return ObjectSize != AliasAnalysis::UnknownSize && ObjectSize < Size;
135135
}
136136

@@ -163,7 +163,7 @@ namespace {
163163
EK_SignExt,
164164
EK_ZeroExt
165165
};
166-
166+
167167
struct VariableGEPIndex {
168168
const Value *V;
169169
ExtensionKind Extension;
@@ -200,7 +200,7 @@ static Value *GetLinearExpression(Value *V, APInt &Scale, APInt &Offset,
200200
Offset = 0;
201201
return V;
202202
}
203-
203+
204204
if (BinaryOperator *BOp = dyn_cast<BinaryOperator>(V)) {
205205
if (ConstantInt *RHSC = dyn_cast<ConstantInt>(BOp->getOperand(1))) {
206206
switch (BOp->getOpcode()) {
@@ -231,7 +231,7 @@ static Value *GetLinearExpression(Value *V, APInt &Scale, APInt &Offset,
231231
}
232232
}
233233
}
234-
234+
235235
// Since GEP indices are sign extended anyway, we don't care about the high
236236
// bits of a sign or zero extended value - just scales and offsets. The
237237
// extensions have to be consistent though.
@@ -248,10 +248,10 @@ static Value *GetLinearExpression(Value *V, APInt &Scale, APInt &Offset,
248248
TD, Depth+1);
249249
Scale = Scale.zext(OldWidth);
250250
Offset = Offset.zext(OldWidth);
251-
251+
252252
return Result;
253253
}
254-
254+
255255
Scale = 1;
256256
Offset = 0;
257257
return V;
@@ -276,7 +276,7 @@ DecomposeGEPExpression(const Value *V, int64_t &BaseOffs,
276276
const DataLayout *TD) {
277277
// Limit recursion depth to limit compile time in crazy cases.
278278
unsigned MaxLookup = 6;
279-
279+
280280
BaseOffs = 0;
281281
do {
282282
// See if this is a bitcast or GEP.
@@ -291,7 +291,7 @@ DecomposeGEPExpression(const Value *V, int64_t &BaseOffs,
291291
}
292292
return V;
293293
}
294-
294+
295295
if (Op->getOpcode() == Instruction::BitCast) {
296296
V = Op->getOperand(0);
297297
continue;
@@ -308,15 +308,15 @@ DecomposeGEPExpression(const Value *V, int64_t &BaseOffs,
308308
V = Simplified;
309309
continue;
310310
}
311-
311+
312312
return V;
313313
}
314-
314+
315315
// Don't attempt to analyze GEPs over unsized objects.
316316
if (!cast<PointerType>(GEPOp->getOperand(0)->getType())
317317
->getElementType()->isSized())
318318
return V;
319-
319+
320320
// If we are lacking DataLayout information, we can't compute the offsets of
321321
// elements computed by GEPs. However, we can handle bitcast equivalent
322322
// GEPs.
@@ -326,7 +326,7 @@ DecomposeGEPExpression(const Value *V, int64_t &BaseOffs,
326326
V = GEPOp->getOperand(0);
327327
continue;
328328
}
329-
329+
330330
// Walk the indices of the GEP, accumulating them into BaseOff/VarIndices.
331331
gep_type_iterator GTI = gep_type_begin(GEPOp);
332332
for (User::const_op_iterator I = GEPOp->op_begin()+1,
@@ -337,38 +337,37 @@ DecomposeGEPExpression(const Value *V, int64_t &BaseOffs,
337337
// For a struct, add the member offset.
338338
unsigned FieldNo = cast<ConstantInt>(Index)->getZExtValue();
339339
if (FieldNo == 0) continue;
340-
340+
341341
BaseOffs += TD->getStructLayout(STy)->getElementOffset(FieldNo);
342342
continue;
343343
}
344-
344+
345345
// For an array/pointer, add the element offset, explicitly scaled.
346346
if (ConstantInt *CIdx = dyn_cast<ConstantInt>(Index)) {
347347
if (CIdx->isZero()) continue;
348348
BaseOffs += TD->getTypeAllocSize(*GTI)*CIdx->getSExtValue();
349349
continue;
350350
}
351-
351+
352352
uint64_t Scale = TD->getTypeAllocSize(*GTI);
353353
ExtensionKind Extension = EK_NotExtended;
354-
354+
355355
// If the integer type is smaller than the pointer size, it is implicitly
356356
// sign extended to pointer size.
357357
unsigned Width = cast<IntegerType>(Index->getType())->getBitWidth();
358358
if (TD->getPointerSizeInBits() > Width)
359359
Extension = EK_SignExt;
360-
360+
361361
// Use GetLinearExpression to decompose the index into a C1*V+C2 form.
362362
APInt IndexScale(Width, 0), IndexOffset(Width, 0);
363363
Index = GetLinearExpression(Index, IndexScale, IndexOffset, Extension,
364364
*TD, 0);
365-
365+
366366
// The GEP index scale ("Scale") scales C1*V+C2, yielding (C1*V+C2)*Scale.
367367
// This gives us an aggregate computation of (C1*Scale)*V + C2*Scale.
368368
BaseOffs += IndexOffset.getSExtValue()*Scale;
369369
Scale *= IndexScale.getSExtValue();
370-
371-
370+
372371
// If we already had an occurrence of this index variable, merge this
373372
// scale into it. For example, we want to handle:
374373
// A[x][x] -> x*16 + x*4 -> x*20
@@ -381,33 +380,33 @@ DecomposeGEPExpression(const Value *V, int64_t &BaseOffs,
381380
break;
382381
}
383382
}
384-
383+
385384
// Make sure that we have a scale that makes sense for this target's
386385
// pointer size.
387386
if (unsigned ShiftBits = 64-TD->getPointerSizeInBits()) {
388387
Scale <<= ShiftBits;
389388
Scale = (int64_t)Scale >> ShiftBits;
390389
}
391-
390+
392391
if (Scale) {
393392
VariableGEPIndex Entry = {Index, Extension,
394393
static_cast<int64_t>(Scale)};
395394
VarIndices.push_back(Entry);
396395
}
397396
}
398-
397+
399398
// Analyze the base pointer next.
400399
V = GEPOp->getOperand(0);
401400
} while (--MaxLookup);
402-
401+
403402
// If the chain of expressions is too deep, just return early.
404403
return V;
405404
}
406405

407406
/// GetIndexDifference - Dest and Src are the variable indices from two
408407
/// decomposed GetElementPtr instructions GEP1 and GEP2 which have common base
409408
/// pointers. Subtract the GEP2 indices from GEP1 to find the symbolic
410-
/// difference between the two pointers.
409+
/// difference between the two pointers.
411410
static void GetIndexDifference(SmallVectorImpl<VariableGEPIndex> &Dest,
412411
const SmallVectorImpl<VariableGEPIndex> &Src) {
413412
if (Src.empty()) return;
@@ -416,12 +415,12 @@ static void GetIndexDifference(SmallVectorImpl<VariableGEPIndex> &Dest,
416415
const Value *V = Src[i].V;
417416
ExtensionKind Extension = Src[i].Extension;
418417
int64_t Scale = Src[i].Scale;
419-
418+
420419
// Find V in Dest. This is N^2, but pointer indices almost never have more
421420
// than a few variable indexes.
422421
for (unsigned j = 0, e = Dest.size(); j != e; ++j) {
423422
if (Dest[j].V != V || Dest[j].Extension != Extension) continue;
424-
423+
425424
// If we found it, subtract off Scale V's from the entry in Dest. If it
426425
// goes to zero, remove the entry.
427426
if (Dest[j].Scale != Scale)
@@ -431,7 +430,7 @@ static void GetIndexDifference(SmallVectorImpl<VariableGEPIndex> &Dest,
431430
Scale = 0;
432431
break;
433432
}
434-
433+
435434
// If we didn't consume this entry, add it to the end of the Dest list.
436435
if (Scale) {
437436
VariableGEPIndex Entry = { V, Extension, -Scale };
@@ -526,7 +525,7 @@ namespace {
526525
return (AliasAnalysis*)this;
527526
return this;
528527
}
529-
528+
530529
private:
531530
// AliasCache - Track alias queries to guard against recursion.
532531
typedef std::pair<Location, Location> LocPair;
@@ -696,7 +695,7 @@ BasicAliasAnalysis::getModRefInfo(ImmutableCallSite CS,
696695
"AliasAnalysis query involving multiple functions!");
697696

698697
const Value *Object = GetUnderlyingObject(Loc.Ptr, TD);
699-
698+
700699
// If this is a tail call and Loc.Ptr points to a stack location, we know that
701700
// the tail call cannot access or modify the local stack.
702701
// We cannot exclude byval arguments here; these belong to the caller of
@@ -706,7 +705,7 @@ BasicAliasAnalysis::getModRefInfo(ImmutableCallSite CS,
706705
if (const CallInst *CI = dyn_cast<CallInst>(CS.getInstruction()))
707706
if (CI->isTailCall())
708707
return NoModRef;
709-
708+
710709
// If the pointer is to a locally allocated object that does not escape,
711710
// then the call can not mod/ref the pointer unless the call takes the pointer
712711
// as an argument, and itself doesn't capture it.
@@ -722,7 +721,7 @@ BasicAliasAnalysis::getModRefInfo(ImmutableCallSite CS,
722721
if (!(*CI)->getType()->isPointerTy() ||
723722
(!CS.doesNotCapture(ArgNo) && !CS.isByValArgument(ArgNo)))
724723
continue;
725-
724+
726725
// If this is a no-capture pointer argument, see if we can tell that it
727726
// is impossible to alias the pointer we're checking. If not, we have to
728727
// assume that the call could touch the pointer, even though it doesn't
@@ -732,7 +731,7 @@ BasicAliasAnalysis::getModRefInfo(ImmutableCallSite CS,
732731
break;
733732
}
734733
}
735-
734+
736735
if (!PassedAsArg)
737736
return NoModRef;
738737
}
@@ -821,7 +820,7 @@ BasicAliasAnalysis::getModRefInfo(ImmutableCallSite CS,
821820
}
822821

823822
// We can bound the aliasing properties of memset_pattern16 just as we can
824-
// for memcpy/memset. This is particularly important because the
823+
// for memcpy/memset. This is particularly important because the
825824
// LoopIdiomRecognizer likes to turn loops into calls to memset_pattern16
826825
// whenever possible.
827826
else if (TLI.has(LibFunc::memset_pattern16) &&
@@ -925,35 +924,35 @@ BasicAliasAnalysis::aliasGEP(const GEPOperator *GEP1, uint64_t V1Size,
925924
GEP1VariableIndices.clear();
926925
}
927926
}
928-
927+
929928
// If we get a No or May, then return it immediately, no amount of analysis
930929
// will improve this situation.
931930
if (BaseAlias != MustAlias) return BaseAlias;
932-
931+
933932
// Otherwise, we have a MustAlias. Since the base pointers alias each other
934933
// exactly, see if the computed offset from the common pointer tells us
935934
// about the relation of the resulting pointer.
936935
const Value *GEP1BasePtr =
937936
DecomposeGEPExpression(GEP1, GEP1BaseOffset, GEP1VariableIndices, TD);
938-
937+
939938
int64_t GEP2BaseOffset;
940939
SmallVector<VariableGEPIndex, 4> GEP2VariableIndices;
941940
const Value *GEP2BasePtr =
942941
DecomposeGEPExpression(GEP2, GEP2BaseOffset, GEP2VariableIndices, TD);
943-
942+
944943
// DecomposeGEPExpression and GetUnderlyingObject should return the
945944
// same result except when DecomposeGEPExpression has no DataLayout.
946945
if (GEP1BasePtr != UnderlyingV1 || GEP2BasePtr != UnderlyingV2) {
947946
assert(TD == 0 &&
948947
"DecomposeGEPExpression and GetUnderlyingObject disagree!");
949948
return MayAlias;
950949
}
951-
950+
952951
// Subtract the GEP2 pointer from the GEP1 pointer to find out their
953952
// symbolic difference.
954953
GEP1BaseOffset -= GEP2BaseOffset;
955954
GetIndexDifference(GEP1VariableIndices, GEP2VariableIndices);
956-
955+
957956
} else {
958957
// Check to see if these two pointers are related by the getelementptr
959958
// instruction. If one pointer is a GEP with a non-zero index of the other
@@ -975,7 +974,7 @@ BasicAliasAnalysis::aliasGEP(const GEPOperator *GEP1, uint64_t V1Size,
975974

976975
const Value *GEP1BasePtr =
977976
DecomposeGEPExpression(GEP1, GEP1BaseOffset, GEP1VariableIndices, TD);
978-
977+
979978
// DecomposeGEPExpression and GetUnderlyingObject should return the
980979
// same result except when DecomposeGEPExpression has no DataLayout.
981980
if (GEP1BasePtr != UnderlyingV1) {
@@ -984,7 +983,7 @@ BasicAliasAnalysis::aliasGEP(const GEPOperator *GEP1, uint64_t V1Size,
984983
return MayAlias;
985984
}
986985
}
987-
986+
988987
// In the two GEP Case, if there is no difference in the offsets of the
989988
// computed pointers, the resultant pointers are a must alias. This
990989
// happens when we have two lexically identical GEP's (for example).
@@ -1226,7 +1225,7 @@ BasicAliasAnalysis::aliasCheck(const Value *V1, uint64_t V1Size,
12261225
if ((isa<ConstantPointerNull>(O2) && isKnownNonNull(O1)) ||
12271226
(isa<ConstantPointerNull>(O1) && isKnownNonNull(O2)))
12281227
return NoAlias;
1229-
1228+
12301229
// If one pointer is the result of a call/invoke or load and the other is a
12311230
// non-escaping local object within the same function, then we know the
12321231
// object couldn't escape to a point where the call could return it.
@@ -1248,7 +1247,7 @@ BasicAliasAnalysis::aliasCheck(const Value *V1, uint64_t V1Size,
12481247
if ((V1Size != UnknownSize && isObjectSmallerThan(O2, V1Size, *TD, *TLI)) ||
12491248
(V2Size != UnknownSize && isObjectSmallerThan(O1, V2Size, *TD, *TLI)))
12501249
return NoAlias;
1251-
1250+
12521251
// Check the cache before climbing up use-def chains. This also terminates
12531252
// otherwise infinitely recursive queries.
12541253
LocPair Locs(Location(V1, V1Size, V1TBAAInfo),

0 commit comments

Comments
 (0)