| author | Bob Wilson <bob.wilson@apple.com> | 2010-01-29 19:19:08 +0000 |
|---|---|---|
| committer | Bob Wilson <bob.wilson@apple.com> | 2010-01-29 19:19:08 +0000 |
| commit | 7c42b9d51e7a05c6b2019f0481eb041a8dd1c1a1 | |
| tree | a5e7e7e8dd1774873dd7e9d44683884617331197 /llvm/lib/Transforms | |
| parent | 65eb86e91252791cea630e6ebcae8a5099aa24cf | |
Improve isSafeToLoadUnconditionally to recognize that GEPs with constant
indices are safe if the result is known to be within the bounds of the
underlying object.
llvm-svn: 94829
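
In practice this means a load through a constant-index GEP (for example, loading one element of a fixed-size local array or one field of a struct alloca) can be treated as non-trapping, because the constant offset can be folded and compared against the size of the underlying object. The following is a minimal standalone sketch of that bounds test, not LLVM code; the names `loadIsWithinObject`, `ByteOffset`, `LoadSize`, and `AllocSize` are chosen here for exposition.

```cpp
#include <cassert>
#include <cstdint>

// Illustrative sketch (not LLVM code): a load of LoadSize bytes at a constant
// ByteOffset into an object of AllocSize bytes cannot trap as long as it stays
// inside the object.
static bool loadIsWithinObject(uint64_t ByteOffset, uint64_t LoadSize,
                               uint64_t AllocSize) {
  return ByteOffset + LoadSize <= AllocSize;
}

int main() {
  // A 4-byte load at offset 8 of a 16-byte alloca is in bounds...
  assert(loadIsWithinObject(8, 4, 16));
  // ...but a 4-byte load at offset 14 would run past the end.
  assert(!loadIsWithinObject(14, 4, 16));
  return 0;
}
```

The actual change derives the byte offset by walking constant-index GEPs and bitcasts back to the underlying allocation, as shown in the Local.cpp diff below.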
Diffstat (limited to 'llvm/lib/Transforms')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | llvm/lib/Transforms/InstCombine/InstCombineLoadStoreAlloca.cpp | 4 |
| -rw-r--r-- | llvm/lib/Transforms/Scalar/GVN.cpp | 3 |
| -rw-r--r-- | llvm/lib/Transforms/Utils/Local.cpp | 69 |

3 files changed, 65 insertions, 11 deletions
```diff
diff --git a/llvm/lib/Transforms/InstCombine/InstCombineLoadStoreAlloca.cpp b/llvm/lib/Transforms/InstCombine/InstCombineLoadStoreAlloca.cpp
index ae728ddecfd..306ed6728bc 100644
--- a/llvm/lib/Transforms/InstCombine/InstCombineLoadStoreAlloca.cpp
+++ b/llvm/lib/Transforms/InstCombine/InstCombineLoadStoreAlloca.cpp
@@ -199,8 +199,8 @@ Instruction *InstCombiner::visitLoadInst(LoadInst &LI) {
     //
     if (SelectInst *SI = dyn_cast<SelectInst>(Op)) {
       // load (select (Cond, &V1, &V2))  --> select(Cond, load &V1, load &V2).
-      if (isSafeToLoadUnconditionally(SI->getOperand(1), SI) &&
-          isSafeToLoadUnconditionally(SI->getOperand(2), SI)) {
+      if (isSafeToLoadUnconditionally(SI->getOperand(1), SI, TD) &&
+          isSafeToLoadUnconditionally(SI->getOperand(2), SI, TD)) {
         Value *V1 = Builder->CreateLoad(SI->getOperand(1),
                                         SI->getOperand(1)->getName()+".val");
         Value *V2 = Builder->CreateLoad(SI->getOperand(2),
diff --git a/llvm/lib/Transforms/Scalar/GVN.cpp b/llvm/lib/Transforms/Scalar/GVN.cpp
index 292a4b311dd..9c184526650 100644
--- a/llvm/lib/Transforms/Scalar/GVN.cpp
+++ b/llvm/lib/Transforms/Scalar/GVN.cpp
@@ -1650,7 +1650,8 @@ bool GVN::processNonLocalLoad(LoadInst *LI,
   // put anywhere; this can be improved, but should be conservatively safe.
   if (!allSingleSucc &&
       // FIXME: REEVALUTE THIS.
-      !isSafeToLoadUnconditionally(LoadPtr, UnavailablePred->getTerminator())) {
+      !isSafeToLoadUnconditionally(LoadPtr,
+                                   UnavailablePred->getTerminator(), TD)) {
     assert(NewInsts.empty() && "Should not have inserted instructions");
     return false;
   }
diff --git a/llvm/lib/Transforms/Utils/Local.cpp b/llvm/lib/Transforms/Utils/Local.cpp
index 92bdf2de449..f0097d0362c 100644
--- a/llvm/lib/Transforms/Utils/Local.cpp
+++ b/llvm/lib/Transforms/Utils/Local.cpp
@@ -38,20 +38,73 @@ using namespace llvm;
 //  Local analysis.
 //
 
+/// getUnderlyingObjectWithOffset - Strip off up to MaxLookup GEPs and
+/// bitcasts to get back to the underlying object being addressed, keeping
+/// track of the offset in bytes from the GEPs relative to the result.
+/// This is closely related to Value::getUnderlyingObject but is located
+/// here to avoid making VMCore depend on TargetData.
+static Value *getUnderlyingObjectWithOffset(Value *V, const TargetData *TD,
+                                            unsigned &ByteOffset,
+                                            unsigned MaxLookup = 6) {
+  if (!isa<PointerType>(V->getType()))
+    return V;
+  for (unsigned Count = 0; MaxLookup == 0 || Count < MaxLookup; ++Count) {
+    if (GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
+      if (!GEP->hasAllConstantIndices())
+        return V;
+      SmallVector<Value*, 8> Indices(GEP->op_begin() + 1, GEP->op_end());
+      ByteOffset += TD->getIndexedOffset(GEP->getPointerOperandType(),
+                                         &Indices[0], Indices.size());
+      V = GEP->getPointerOperand();
+    } else if (Operator::getOpcode(V) == Instruction::BitCast) {
+      V = cast<Operator>(V)->getOperand(0);
+    } else if (GlobalAlias *GA = dyn_cast<GlobalAlias>(V)) {
+      if (GA->mayBeOverridden())
+        return V;
+      V = GA->getAliasee();
+    } else {
+      return V;
+    }
+    assert(isa<PointerType>(V->getType()) && "Unexpected operand type!");
+  }
+  return V;
+}
+
 /// isSafeToLoadUnconditionally - Return true if we know that executing a load
 /// from this value cannot trap.  If it is not obviously safe to load from the
 /// specified pointer, we do a quick local scan of the basic block containing
 /// ScanFrom, to determine if the address is already accessed.
-bool llvm::isSafeToLoadUnconditionally(Value *V, Instruction *ScanFrom) {
-  // If it is an alloca it is always safe to load from.
-  if (isa<AllocaInst>(V)) return true;
+bool llvm::isSafeToLoadUnconditionally(Value *V, Instruction *ScanFrom,
+                                       const TargetData *TD) {
+  unsigned ByteOffset = 0;
+  Value *Base = V;
+  if (TD)
+    Base = getUnderlyingObjectWithOffset(V, TD, ByteOffset);
+
+  const Type *BaseType = 0;
+  if (const AllocaInst *AI = dyn_cast<AllocaInst>(Base))
+    // If it is an alloca it is always safe to load from.
+    BaseType = AI->getAllocatedType();
+  else if (const GlobalValue *GV = dyn_cast<GlobalValue>(Base)) {
+    // Global variables are safe to load from but their size cannot be
+    // guaranteed if they are overridden.
+    if (!isa<GlobalAlias>(GV) && !GV->mayBeOverridden())
+      BaseType = GV->getType()->getElementType();
+  }
 
-  // If it is a global variable it is mostly safe to load from.
-  if (const GlobalValue *GV = dyn_cast<GlobalVariable>(V))
-    // Don't try to evaluate aliases.  External weak GV can be null.
-    return !isa<GlobalAlias>(GV) && !GV->hasExternalWeakLinkage();
+  if (BaseType) {
+    if (!TD)
+      return true; // Loading directly from an alloca or global is OK.
+    if (BaseType->isSized()) {
+      // Check if the load is within the bounds of the underlying object.
+      const PointerType *AddrTy = cast<PointerType>(V->getType());
+      unsigned LoadSize = TD->getTypeStoreSize(AddrTy->getElementType());
+      if (ByteOffset + LoadSize <= TD->getTypeAllocSize(BaseType))
+        return true;
+    }
+  }
 
-  // Otherwise, be a little bit agressive by scanning the local block where we
+  // Otherwise, be a little bit aggressive by scanning the local block where we
   // want to check to see if the pointer is already being loaded or stored
   // from/to.  If so, the previous load or store would have already trapped,
   // so there is no harm doing an extra load (also, CSE will later eliminate
```
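
For code outside this patch, the visible change is the extra `TargetData` parameter on `isSafeToLoadUnconditionally`, which may be null. Below is a hedged sketch of a hypothetical caller against an LLVM tree of this era; the header paths and the assumption that the surrounding pass already holds a (possibly null) `TD` pointer are mine, not part of the commit.

```cpp
// Hypothetical caller sketch against an LLVM tree from the r94829 era, where
// isSafeToLoadUnconditionally was declared in Transforms/Utils/Local.h.
#include "llvm/Instructions.h"
#include "llvm/Target/TargetData.h"
#include "llvm/Transforms/Utils/Local.h"

using namespace llvm;

static bool canSpeculateLoad(Value *Ptr, Instruction *InsertPt,
                             const TargetData *TD) {
  // Passing TD lets constant GEP offsets be folded and compared against the
  // size of the underlying alloca or non-overridable global.  With a null TD
  // the bounds check is skipped and only the direct alloca/global case and
  // the local block scan apply.
  return isSafeToLoadUnconditionally(Ptr, InsertPt, TD);
}
```

When `TD` is null the function still accepts loads directly from allocas and non-overridable globals, and otherwise falls back to scanning the local block, so existing callers that cannot supply `TargetData` keep roughly the old behavior.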

