summaryrefslogtreecommitdiffstats
path: root/llvm/lib/Transforms/InstCombine/InstCombineCasts.cpp
diff options
context:
space:
mode:
authorAnna Thomas <anna@azul.com>2016-07-08 15:18:56 +0000
committerAnna Thomas <anna@azul.com>2016-07-08 15:18:56 +0000
commit3124f6273ab4ce6e6ee6f9b6e8b9781824e6cdb1 (patch)
tree8bba84eb055c68c148b8e25ad073b978038ed794 /llvm/lib/Transforms/InstCombine/InstCombineCasts.cpp
parent68853ab2c5e4c64e9aee003f77ed5008ec8d0fe4 (diff)
downloadbcm5719-llvm-3124f6273ab4ce6e6ee6f9b6e8b9781824e6cdb1.tar.gz
bcm5719-llvm-3124f6273ab4ce6e6ee6f9b6e8b9781824e6cdb1.zip
InstCombine rule to fold truncs whose value is available
We can fold truncs whose operand feeds from a load, if the trunc value is available through a prior load/store. This change is from http://reviews.llvm.org/D21246, which folded the trunc but missed the bitcast or ptrtoint/inttoptr required in the RAUW call when the load type didn't match the prior load/store type. Differential Revision: http://reviews.llvm.org/D21791 llvm-svn: 274853
Diffstat (limited to 'llvm/lib/Transforms/InstCombine/InstCombineCasts.cpp')
-rw-r--r--llvm/lib/Transforms/InstCombine/InstCombineCasts.cpp24
1 files changed, 23 insertions, 1 deletions
diff --git a/llvm/lib/Transforms/InstCombine/InstCombineCasts.cpp b/llvm/lib/Transforms/InstCombine/InstCombineCasts.cpp
index 20556157188..395e83661d5 100644
--- a/llvm/lib/Transforms/InstCombine/InstCombineCasts.cpp
+++ b/llvm/lib/Transforms/InstCombine/InstCombineCasts.cpp
@@ -13,9 +13,10 @@
#include "InstCombineInternal.h"
#include "llvm/Analysis/ConstantFolding.h"
+#include "llvm/Analysis/Loads.h"
+#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/PatternMatch.h"
-#include "llvm/Analysis/TargetLibraryInfo.h"
using namespace llvm;
using namespace PatternMatch;
@@ -575,6 +576,27 @@ Instruction *InstCombiner::visitTrunc(TruncInst &CI) {
if (Instruction *I = foldVecTruncToExtElt(CI, *this, DL))
return I;
+ // When trunc operand is a widened load, see if we can get the value from a
+ // previous store/load
+ if (auto *LI = dyn_cast<LoadInst>(Src)) {
+ BasicBlock::iterator BBI(*LI);
+
+ // Scan a few instructions up from LI and if we find a partial load/store
+ // of Type DestTy that feeds into LI, we can replace all uses of the trunc
+ // with the load/store value.
+ // This replacement can be done only in the case of non-volatile loads, with
+ // ordering at most unordered. If the load is atomic, its only use should be
+ // the trunc instruction. We don't want to allow other users of LI to see a
+ // value that is out of sync with the value we're folding the trunc to (in
+ // case of a race).
+ if (LI->isUnordered() && (!LI->isAtomic() || LI->hasOneUse()))
+ if (Value *AvailableVal = FindAvailableLoadedValue(
+ LI->getPointerOperand(), DestTy, LI->isAtomic(), LI->getParent(),
+ BBI, DefMaxInstsToScan))
+ return replaceInstUsesWith(
+ CI, Builder->CreateBitOrPointerCast(AvailableVal, CI.getType(),
+ CI.getName() + ".cast"));
+ }
return nullptr;
}
OpenPOWER on IntegriCloud