author     Sanjoy Das <sanjoy@playingwithpointers.com>  2016-06-01 16:47:45 +0000
committer  Sanjoy Das <sanjoy@playingwithpointers.com>  2016-06-01 16:47:45 +0000
commit     10df497a1f4766e1c5ca598ba5fd08085f6d263c (patch)
tree       9f24fb1cb29ce5fc794a097a1541f5da44157490 /llvm/lib/Analysis/Loads.cpp
parent     f10c8290fac5b16511ac48d676c50765eb92ddae (diff)
Reduce dependence on pointee types when deducing dereferenceability
Summary:
Change some of the internal interfaces in Loads.cpp to keep track of the
number of bytes we're trying to prove dereferenceable using an explicit
`Size` parameter.

Before this, the `Size` parameter was implicitly inferred from the pointee
type of the pointer whose dereferenceability we were trying to prove,
causing us to be conservative around bitcasts. This was unfortunate since
bitcast instructions are no-ops and should never break optimizations. With
an explicit `Size` parameter, we're more precise (as shown in the test
cases), and the code is simpler.

We should eventually move towards a `DerefQuery` struct that groups
together a base pointer, an offset, a size and an alignment; but this
patch is a first step.

Reviewers: apilipenko, dblaikie, hfinkel, reames

Subscribers: mcrosier, llvm-commits

Differential Revision: http://reviews.llvm.org/D20764

llvm-svn: 271406
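The `DerefQuery` struct mentioned in the summary is not part of this patch; as a
minimal sketch only, assuming the four fields the summary names (field names and
types are guesses for illustration, not taken from the patch), it might look like:

// Hypothetical sketch of the DerefQuery the summary alludes to -- not part of
// this patch; fields and types are assumptions chosen for illustration.
#include "llvm/ADT/APInt.h"

namespace llvm {
class Value;

struct DerefQuery {
  const Value *Base; // pointer whose dereferenceability is being queried
  APInt Offset;      // constant byte offset from Base
  APInt Size;        // number of bytes that must be dereferenceable
  unsigned Align;    // required alignment, in bytes
};
} // namespace llvm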
Diffstat (limited to 'llvm/lib/Analysis/Loads.cpp')
-rw-r--r--  llvm/lib/Analysis/Loads.cpp  107
1 file changed, 35 insertions(+), 72 deletions(-)
diff --git a/llvm/lib/Analysis/Loads.cpp b/llvm/lib/Analysis/Loads.cpp
index b0355ccee87..170782d84bb 100644
--- a/llvm/lib/Analysis/Loads.cpp
+++ b/llvm/lib/Analysis/Loads.cpp
@@ -25,39 +25,23 @@
using namespace llvm;
-static bool isDereferenceableFromAttribute(const Value *BV, APInt Offset,
- Type *Ty, const DataLayout &DL,
+static bool isDereferenceableFromAttribute(const Value *BV, APInt Size,
+ const DataLayout &DL,
const Instruction *CtxI,
const DominatorTree *DT,
const TargetLibraryInfo *TLI) {
- assert(Offset.isNonNegative() && "offset can't be negative");
- assert(Ty->isSized() && "must be sized");
-
bool CheckForNonNull = false;
- APInt DerefBytes(Offset.getBitWidth(),
+ APInt DerefBytes(Size.getBitWidth(),
BV->getPointerDereferenceableBytes(CheckForNonNull));
if (DerefBytes.getBoolValue())
- if (DerefBytes.uge(Offset + DL.getTypeStoreSize(Ty)))
+ if (DerefBytes.uge(Size))
if (!CheckForNonNull || isKnownNonNullAt(BV, CtxI, DT, TLI))
return true;
return false;
}
-static bool isDereferenceableFromAttribute(const Value *V, const DataLayout &DL,
- const Instruction *CtxI,
- const DominatorTree *DT,
- const TargetLibraryInfo *TLI) {
- Type *VTy = V->getType();
- Type *Ty = VTy->getPointerElementType();
- if (!Ty->isSized())
- return false;
-
- APInt Offset(DL.getTypeStoreSizeInBits(VTy), 0);
- return isDereferenceableFromAttribute(V, Offset, Ty, DL, CtxI, DT, TLI);
-}
-
static bool isAligned(const Value *Base, APInt Offset, unsigned Align,
const DataLayout &DL) {
APInt BaseAlign(Offset.getBitWidth(), Base->getPointerAlignment(DL));
@@ -85,7 +69,7 @@ static bool isAligned(const Value *Base, unsigned Align, const DataLayout &DL) {
/// Test if V is always a pointer to allocated and suitably aligned memory for
/// a simple load or store.
static bool isDereferenceableAndAlignedPointer(
- const Value *V, unsigned Align, const DataLayout &DL,
+ const Value *V, unsigned Align, APInt Size, const DataLayout &DL,
const Instruction *CtxI, const DominatorTree *DT,
const TargetLibraryInfo *TLI, SmallPtrSetImpl<const Value *> &Visited) {
// Note that it is not safe to speculate into a malloc'd region because
@@ -93,65 +77,50 @@ static bool isDereferenceableAndAlignedPointer(
bool CheckForNonNull;
if (V->isPointerDereferenceable(CheckForNonNull)) {
- if (CheckForNonNull && !isKnownNonNullAt(V, CtxI, DT, TLI))
- return false;
- return isAligned(V, Align, DL);
+ Type *ETy = V->getType()->getPointerElementType();
+ if (ETy->isSized() && Size.ule(DL.getTypeStoreSize(ETy))) {
+ if (CheckForNonNull && !isKnownNonNullAt(V, CtxI, DT, TLI))
+ return false;
+ return isAligned(V, Align, DL);
+ }
}
- // It's not always safe to follow a bitcast, for example:
- // bitcast i8* (alloca i8) to i32*
- // would result in a 4-byte load from a 1-byte alloca. However,
- // if we're casting from a pointer from a type of larger size
- // to a type of smaller size (or the same size), and the alignment
- // is at least as large as for the resulting pointer type, then
- // we can look through the bitcast.
- if (const BitCastOperator *BC = dyn_cast<BitCastOperator>(V)) {
- Type *STy = BC->getSrcTy()->getPointerElementType(),
- *DTy = BC->getDestTy()->getPointerElementType();
- if (STy->isSized() && DTy->isSized() &&
- (DL.getTypeStoreSize(STy) >= DL.getTypeStoreSize(DTy)) &&
- (DL.getABITypeAlignment(STy) >= DL.getABITypeAlignment(DTy)))
- return isDereferenceableAndAlignedPointer(BC->getOperand(0), Align, DL,
- CtxI, DT, TLI, Visited);
- }
+ // bitcast instructions are no-ops as far as dereferenceability is concerned.
+ if (const BitCastOperator *BC = dyn_cast<BitCastOperator>(V))
+ return isDereferenceableAndAlignedPointer(BC->getOperand(0), Align, Size,
+ DL, CtxI, DT, TLI, Visited);
- if (isDereferenceableFromAttribute(V, DL, CtxI, DT, TLI))
+ if (isDereferenceableFromAttribute(V, Size, DL, CtxI, DT, TLI))
return isAligned(V, Align, DL);
// For GEPs, determine if the indexing lands within the allocated object.
if (const GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
- Type *Ty = GEP->getResultElementType();
const Value *Base = GEP->getPointerOperand();
- // Conservatively require that the base pointer be fully dereferenceable
- // and aligned.
- if (!Visited.insert(Base).second)
- return false;
- if (!isDereferenceableAndAlignedPointer(Base, Align, DL, CtxI, DT, TLI,
- Visited))
- return false;
-
APInt Offset(DL.getPointerTypeSizeInBits(GEP->getType()), 0);
- if (!GEP->accumulateConstantOffset(DL, Offset))
+ if (!GEP->accumulateConstantOffset(DL, Offset) || Offset.isNegative() ||
+ !Offset.urem(APInt(Offset.getBitWidth(), Align)).isMinValue())
return false;
- // Check if the load is within the bounds of the underlying object
- // and offset is aligned.
- uint64_t LoadSize = DL.getTypeStoreSize(Ty);
- Type *BaseType = GEP->getSourceElementType();
- assert(isPowerOf2_32(Align) && "must be a power of 2!");
- return (Offset + LoadSize).ule(DL.getTypeAllocSize(BaseType)) &&
- !(Offset & APInt(Offset.getBitWidth(), Align-1));
+ // If the base pointer is dereferenceable for Offset+Size bytes, then the
+ // GEP (== Base + Offset) is dereferenceable for Size bytes. If the base
+ // pointer is aligned to Align bytes, and the Offset is divisible by Align
+ // then the GEP (== Base + Offset == k_0 * Align + k_1 * Align) is also
+ // aligned to Align bytes.
+
+ return Visited.insert(Base).second &&
+ isDereferenceableAndAlignedPointer(Base, Align, Offset + Size, DL,
+ CtxI, DT, TLI, Visited);
}
// For gc.relocate, look through relocations
if (const GCRelocateInst *RelocateInst = dyn_cast<GCRelocateInst>(V))
return isDereferenceableAndAlignedPointer(
- RelocateInst->getDerivedPtr(), Align, DL, CtxI, DT, TLI, Visited);
+ RelocateInst->getDerivedPtr(), Align, Size, DL, CtxI, DT, TLI, Visited);
if (const AddrSpaceCastInst *ASC = dyn_cast<AddrSpaceCastInst>(V))
- return isDereferenceableAndAlignedPointer(ASC->getOperand(0), Align, DL,
- CtxI, DT, TLI, Visited);
+ return isDereferenceableAndAlignedPointer(ASC->getOperand(0), Align, Size,
+ DL, CtxI, DT, TLI, Visited);
// If we don't know, assume the worst.
return false;
@@ -173,19 +142,13 @@ bool llvm::isDereferenceableAndAlignedPointer(const Value *V, unsigned Align,
if (Align == 0)
Align = DL.getABITypeAlignment(Ty);
- if (Ty->isSized()) {
- APInt Offset(DL.getTypeStoreSizeInBits(VTy), 0);
- const Value *BV = V->stripAndAccumulateInBoundsConstantOffsets(DL, Offset);
-
- if (Offset.isNonNegative())
- if (isDereferenceableFromAttribute(BV, Offset, Ty, DL, CtxI, DT, TLI) &&
- isAligned(BV, Offset, Align, DL))
- return true;
- }
+ if (!Ty->isSized())
+ return false;
SmallPtrSet<const Value *, 32> Visited;
- return ::isDereferenceableAndAlignedPointer(V, Align, DL, CtxI, DT, TLI,
- Visited);
+ return ::isDereferenceableAndAlignedPointer(
+ V, Align, APInt(DL.getTypeSizeInBits(VTy), DL.getTypeStoreSize(Ty)), DL,
+ CtxI, DT, TLI, Visited);
}
bool llvm::isDereferenceablePointer(const Value *V, const DataLayout &DL,
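The GEP case above reasons that if the base pointer is dereferenceable for
Offset + Size bytes and Offset is a multiple of Align, then the GEP itself is
dereferenceable for Size bytes at alignment Align. A small standalone sketch of
that arithmetic, using made-up numbers (base dereferenceable for 16 bytes, GEP
offset 8, 4-byte access, alignment 8) rather than anything from the patch:

// Illustration of the Offset/Size/Align check used in the GEP case; the
// concrete numbers here are invented for the example.
#include "llvm/ADT/APInt.h"
#include <cassert>

int main() {
  unsigned Align = 8;
  llvm::APInt Offset(64, 8);      // constant byte offset accumulated from the GEP
  llvm::APInt Size(64, 4);        // bytes the access needs to be dereferenceable
  llvm::APInt DerefBytes(64, 16); // bytes known dereferenceable on the base

  // Offset must be non-negative and a multiple of Align (the urem/isMinValue
  // test in the patch), and the base must cover Offset + Size bytes.
  assert(!Offset.isNegative());
  assert(Offset.urem(llvm::APInt(Offset.getBitWidth(), Align)).isMinValue());
  assert(DerefBytes.uge(Offset + Size));
  return 0;
}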