author    Chandler Carruth <chandlerc@gmail.com>  2012-04-06 01:11:52 +0000
committer Chandler Carruth <chandlerc@gmail.com>  2012-04-06 01:11:52 +0000
commit    e41f6f4189d2005ad747cb53bd9987fc7a41e3e8 (patch)
tree      86558f86b3dccc7970874876f33555d9cff0c6e4
parent    e8628c5bc731ce04cd6f918733e1b83ae269a20c (diff)
Sink the return instruction collection until after we're done deleting
dead code, including dead return instructions in some cases. Otherwise, we end
up with a bogus pointer to a return instruction that blows up much further down
the road.

It turns out that this pattern is simpler to code, easier to update in the face
of enhancements to the inliner cleanup, and likely cheaper given that it won't
add dead instructions to the list.

Thanks to John Regehr's numerous test cases for teasing this out.

llvm-svn: 154157
-rw-r--r--  llvm/lib/Transforms/Utils/CloneFunction.cpp  | 16
-rw-r--r--  llvm/test/Transforms/Inline/inline_cleanup.ll | 37
2 files changed, 46 insertions(+), 7 deletions(-)
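The pattern in this change is general: don't cache pointers into IR that a later
cleanup phase may delete; instead, collect them in a single walk after the cleanup
has run. Below is a minimal, self-contained C++ sketch of that ordering using
hypothetical toy types (Block, collectLiveReturns) rather than the real LLVM
classes; it only illustrates the idea, not the actual CloneAndPruneFunctionInto
implementation.

// Hypothetical illustration (not LLVM API): record "return" blocks only after
// dead blocks have been erased, so every recorded pointer is still valid.
#include <iostream>
#include <memory>
#include <string>
#include <vector>

struct Block {
  std::string Name;
  bool IsReturn = false; // stands in for "terminator is a ReturnInst"
  bool Dead = false;     // marked dead by a later cleanup pass
};

std::vector<Block *> collectLiveReturns(std::vector<std::unique_ptr<Block>> &Fn) {
  // First, drop dead blocks (models the inliner's pruning/folding step).
  std::erase_if(Fn, [](const std::unique_ptr<Block> &B) { return B->Dead; });

  // Only now record the surviving return blocks; nothing collected here can dangle.
  std::vector<Block *> Returns;
  for (const auto &B : Fn)
    if (B->IsReturn)
      Returns.push_back(B.get());
  return Returns;
}

int main() {
  std::vector<std::unique_ptr<Block>> Fn;
  Fn.push_back(std::make_unique<Block>(Block{"entry", false, false}));
  Fn.push_back(std::make_unique<Block>(Block{"if.then", true, true}));  // dead return
  Fn.push_back(std::make_unique<Block>(Block{"if.end5", true, false})); // live return

  for (Block *R : collectLiveReturns(Fn))
    std::cout << "live return in block " << R->Name << "\n"; // prints only "if.end5"
}

This mirrors why the commit moves the Returns.push_back call out of CloneBlock and
into the loop that runs after ConstantFoldTerminator: nothing is recorded until the
deletion work is finished.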
diff --git a/llvm/lib/Transforms/Utils/CloneFunction.cpp b/llvm/lib/Transforms/Utils/CloneFunction.cpp
index fe0060068b1..423f47d22bf 100644
--- a/llvm/lib/Transforms/Utils/CloneFunction.cpp
+++ b/llvm/lib/Transforms/Utils/CloneFunction.cpp
@@ -200,7 +200,6 @@ namespace {
const Function *OldFunc;
ValueToValueMapTy &VMap;
bool ModuleLevelChanges;
- SmallVectorImpl<ReturnInst*> &Returns;
const char *NameSuffix;
ClonedCodeInfo *CodeInfo;
const TargetData *TD;
@@ -208,13 +207,12 @@ namespace {
PruningFunctionCloner(Function *newFunc, const Function *oldFunc,
ValueToValueMapTy &valueMap,
bool moduleLevelChanges,
- SmallVectorImpl<ReturnInst*> &returns,
const char *nameSuffix,
ClonedCodeInfo *codeInfo,
const TargetData *td)
: NewFunc(newFunc), OldFunc(oldFunc),
VMap(valueMap), ModuleLevelChanges(moduleLevelChanges),
- Returns(returns), NameSuffix(nameSuffix), CodeInfo(codeInfo), TD(td) {
+ NameSuffix(nameSuffix), CodeInfo(codeInfo), TD(td) {
}
/// CloneBlock - The specified block is found to be reachable, clone it and
@@ -352,9 +350,6 @@ void PruningFunctionCloner::CloneBlock(const BasicBlock *BB,
CodeInfo->ContainsDynamicAllocas |= hasStaticAllocas &&
BB != &BB->getParent()->front();
}
-
- if (ReturnInst *RI = dyn_cast<ReturnInst>(NewBB->getTerminator()))
- Returns.push_back(RI);
}
/// CloneAndPruneFunctionInto - This works exactly like CloneFunctionInto,
@@ -381,7 +376,7 @@ void llvm::CloneAndPruneFunctionInto(Function *NewFunc, const Function *OldFunc,
#endif
PruningFunctionCloner PFC(NewFunc, OldFunc, VMap, ModuleLevelChanges,
- Returns, NameSuffix, CodeInfo, TD);
+ NameSuffix, CodeInfo, TD);
// Clone the entry block, and anything recursively reachable from it.
std::vector<const BasicBlock*> CloneWorklist;
@@ -537,6 +532,13 @@ void llvm::CloneAndPruneFunctionInto(Function *NewFunc, const Function *OldFunc,
// and we still want to prune the dead code as early as possible.
ConstantFoldTerminator(I);
+ // Track all of the newly-inserted returns.
+ if (ReturnInst *RI = dyn_cast<ReturnInst>(I->getTerminator())) {
+ Returns.push_back(RI);
+ ++I;
+ continue;
+ }
+
BranchInst *BI = dyn_cast<BranchInst>(I->getTerminator());
if (!BI || BI->isConditional()) { ++I; continue; }
diff --git a/llvm/test/Transforms/Inline/inline_cleanup.ll b/llvm/test/Transforms/Inline/inline_cleanup.ll
index 27ee617fc9d..1dd94f3e6c5 100644
--- a/llvm/test/Transforms/Inline/inline_cleanup.ll
+++ b/llvm/test/Transforms/Inline/inline_cleanup.ll
@@ -136,3 +136,40 @@ entry:
call void @inner2(i32 0, i32 -1, i32 %z, i1 %b)
ret void
}
+
+define void @PR12470_inner(i16 signext %p1) nounwind uwtable {
+entry:
+ br i1 undef, label %cond.true, label %cond.false
+
+cond.true:
+ br label %cond.end
+
+cond.false:
+ %conv = sext i16 %p1 to i32
+ br label %cond.end
+
+cond.end:
+ %cond = phi i32 [ undef, %cond.true ], [ 0, %cond.false ]
+ %tobool = icmp eq i32 %cond, 0
+ br i1 %tobool, label %if.end5, label %if.then
+
+if.then:
+ ret void
+
+if.end5:
+ ret void
+}
+
+define void @PR12470_outer() {
+; This previously crashed during inliner cleanup while folding inner return
+; instructions. Check that we don't crash and that we produce a function with
+; a single return instruction.
+; CHECK: define void @PR12470_outer
+; CHECK: ret void
+; CHECK-NOT: ret void
+; CHECK: }
+
+entry:
+ call void @PR12470_inner(i16 signext 1)
+ ret void
+}