author    Teresa Johnson <tejohnson@google.com>  2018-09-27 14:55:32 +0000
committer Teresa Johnson <tejohnson@google.com>  2018-09-27 14:55:32 +0000
commit    f24136f17ac15ea6b07a3fe4aa8e1b690a3cd950 (patch)
tree      3d8de3fe2d4d805361d0df35c799f3c77a5d2058 /llvm/lib/Analysis
parent    a9a5eee1694833989739fc31045b48bdaacafaff (diff)
[WPD] Fix incorrect devirtualization after indirect call promotion
Summary:
Add a dominance check to ensure that the possible devirtualizable call is
actually dominated by the type test/checked load intrinsic being analyzed.
With PGO, after indirect call promotion is performed during the compile step,
followed by inlining, we may have a type test in the promoted and inlined
sequence that allows an indirect call in that sequence to be devirtualized.
That indirect call (inserted by inlining after promotion) will share the same
vtable pointer as the fallback indirect call that cannot be devirtualized.
Before this patch the code was incorrectly devirtualizing the fallback
indirect call. See the new test and the example described there for more
details.

Reviewers: pcc, vitalybuka

Subscribers: mehdi_amini, Prazek, eraman, steven_wu, dexonsmith, llvm-commits

Differential Revision: https://reviews.llvm.org/D52514

llvm-svn: 343226
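Below is a minimal, hypothetical sketch (not the patch itself) of the dominance
filter described above: while walking uses of a loaded vtable/function pointer,
any user that is not dominated by the type intrinsic, such as the fallback
indirect call left behind by indirect call promotion, is skipped instead of
being recorded as devirtualizable. The helper name and parameters are
illustrative, not part of the patch; it assumes only LLVM's DominatorTree API.

#include "llvm/ADT/SmallVector.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"

using namespace llvm;

// Collect only the calls through FPtr that are guarded by (dominated by)
// the type intrinsic being analyzed.
static void collectGuardedCalls(const CallInst *TypeIntrinsic, Value *FPtr,
                                DominatorTree &DT,
                                SmallVectorImpl<CallInst *> &GuardedCalls) {
  for (const Use &U : FPtr->uses()) {
    auto *UserInst = dyn_cast<Instruction>(U.getUser());
    if (!UserInst)
      continue;
    // A fallback indirect call inserted by indirect call promotion shares the
    // same vtable pointer but is not dominated by the type test, so it is
    // skipped here and never treated as devirtualizable.
    if (!DT.dominates(TypeIntrinsic, UserInst))
      continue;
    if (auto *Call = dyn_cast<CallInst>(UserInst))
      GuardedCalls.push_back(Call);
  }
}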
Diffstat (limited to 'llvm/lib/Analysis')
-rw-r--r--  llvm/lib/Analysis/ModuleSummaryAnalysis.cpp | 23
-rw-r--r--  llvm/lib/Analysis/TypeMetadataUtils.cpp     | 42
2 files changed, 39 insertions, 26 deletions
diff --git a/llvm/lib/Analysis/ModuleSummaryAnalysis.cpp b/llvm/lib/Analysis/ModuleSummaryAnalysis.cpp
index 17dae20ce3a..bca40043fd9 100644
--- a/llvm/lib/Analysis/ModuleSummaryAnalysis.cpp
+++ b/llvm/lib/Analysis/ModuleSummaryAnalysis.cpp
@@ -147,7 +147,8 @@ static void addIntrinsicToSummary(
SetVector<FunctionSummary::VFuncId> &TypeTestAssumeVCalls,
SetVector<FunctionSummary::VFuncId> &TypeCheckedLoadVCalls,
SetVector<FunctionSummary::ConstVCall> &TypeTestAssumeConstVCalls,
- SetVector<FunctionSummary::ConstVCall> &TypeCheckedLoadConstVCalls) {
+ SetVector<FunctionSummary::ConstVCall> &TypeCheckedLoadConstVCalls,
+ DominatorTree &DT) {
switch (CI->getCalledFunction()->getIntrinsicID()) {
case Intrinsic::type_test: {
auto *TypeMDVal = cast<MetadataAsValue>(CI->getArgOperand(1));
@@ -172,7 +173,7 @@ static void addIntrinsicToSummary(
SmallVector<DevirtCallSite, 4> DevirtCalls;
SmallVector<CallInst *, 4> Assumes;
- findDevirtualizableCallsForTypeTest(DevirtCalls, Assumes, CI);
+ findDevirtualizableCallsForTypeTest(DevirtCalls, Assumes, CI, DT);
for (auto &Call : DevirtCalls)
addVCallToSet(Call, Guid, TypeTestAssumeVCalls,
TypeTestAssumeConstVCalls);
@@ -192,7 +193,7 @@ static void addIntrinsicToSummary(
SmallVector<Instruction *, 4> Preds;
bool HasNonCallUses = false;
findDevirtualizableCallsForTypeCheckedLoad(DevirtCalls, LoadedPtrs, Preds,
- HasNonCallUses, CI);
+ HasNonCallUses, CI, DT);
// Any non-call uses of the result of llvm.type.checked.load will
// prevent us from optimizing away the llvm.type.test.
if (HasNonCallUses)
@@ -208,11 +209,10 @@ static void addIntrinsicToSummary(
}
}
-static void
-computeFunctionSummary(ModuleSummaryIndex &Index, const Module &M,
- const Function &F, BlockFrequencyInfo *BFI,
- ProfileSummaryInfo *PSI, bool HasLocalsInUsedOrAsm,
- DenseSet<GlobalValue::GUID> &CantBePromoted) {
+static void computeFunctionSummary(
+ ModuleSummaryIndex &Index, const Module &M, const Function &F,
+ BlockFrequencyInfo *BFI, ProfileSummaryInfo *PSI, DominatorTree &DT,
+ bool HasLocalsInUsedOrAsm, DenseSet<GlobalValue::GUID> &CantBePromoted) {
// Summary not currently supported for anonymous functions, they should
// have been named.
assert(F.hasName());
@@ -273,7 +273,7 @@ computeFunctionSummary(ModuleSummaryIndex &Index, const Module &M,
if (CI && CalledFunction->isIntrinsic()) {
addIntrinsicToSummary(
CI, TypeTests, TypeTestAssumeVCalls, TypeCheckedLoadVCalls,
- TypeTestAssumeConstVCalls, TypeCheckedLoadConstVCalls);
+ TypeTestAssumeConstVCalls, TypeCheckedLoadConstVCalls, DT);
continue;
}
// We should have named any anonymous globals
@@ -488,18 +488,19 @@ ModuleSummaryIndex llvm::buildModuleSummaryIndex(
if (F.isDeclaration())
continue;
+ DominatorTree DT(const_cast<Function &>(F));
BlockFrequencyInfo *BFI = nullptr;
std::unique_ptr<BlockFrequencyInfo> BFIPtr;
if (GetBFICallback)
BFI = GetBFICallback(F);
else if (F.hasProfileData()) {
- LoopInfo LI{DominatorTree(const_cast<Function &>(F))};
+ LoopInfo LI{DT};
BranchProbabilityInfo BPI{F, LI};
BFIPtr = llvm::make_unique<BlockFrequencyInfo>(F, BPI, LI);
BFI = BFIPtr.get();
}
- computeFunctionSummary(Index, M, F, BFI, PSI,
+ computeFunctionSummary(Index, M, F, BFI, PSI, DT,
!LocalsUsed.empty() || HasLocalInlineAsmSymbol,
CantBePromoted);
}
diff --git a/llvm/lib/Analysis/TypeMetadataUtils.cpp b/llvm/lib/Analysis/TypeMetadataUtils.cpp
index 6871e4887c9..bd13a43b8d4 100644
--- a/llvm/lib/Analysis/TypeMetadataUtils.cpp
+++ b/llvm/lib/Analysis/TypeMetadataUtils.cpp
@@ -14,6 +14,7 @@
#include "llvm/Analysis/TypeMetadataUtils.h"
#include "llvm/IR/Constants.h"
+#include "llvm/IR/Dominators.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Module.h"
@@ -22,11 +23,21 @@ using namespace llvm;
// Search for virtual calls that call FPtr and add them to DevirtCalls.
static void
findCallsAtConstantOffset(SmallVectorImpl<DevirtCallSite> &DevirtCalls,
- bool *HasNonCallUses, Value *FPtr, uint64_t Offset) {
+ bool *HasNonCallUses, Value *FPtr, uint64_t Offset,
+ const CallInst *CI, DominatorTree &DT) {
for (const Use &U : FPtr->uses()) {
- Value *User = U.getUser();
+ Instruction *User = cast<Instruction>(U.getUser());
+ // Ignore this instruction if it is not dominated by the type intrinsic
+ // being analyzed. Otherwise we may transform a call sharing the same
+ // vtable pointer incorrectly. Specifically, this situation can arise
+ // after indirect call promotion and inlining, where we may have uses
+ // of the vtable pointer guarded by a function pointer check, and a fallback
+ // indirect call.
+ if (!DT.dominates(CI, User))
+ continue;
if (isa<BitCastInst>(User)) {
- findCallsAtConstantOffset(DevirtCalls, HasNonCallUses, User, Offset);
+ findCallsAtConstantOffset(DevirtCalls, HasNonCallUses, User, Offset, CI,
+ DT);
} else if (auto CI = dyn_cast<CallInst>(User)) {
DevirtCalls.push_back({Offset, CI});
} else if (auto II = dyn_cast<InvokeInst>(User)) {
@@ -38,23 +49,23 @@ findCallsAtConstantOffset(SmallVectorImpl<DevirtCallSite> &DevirtCalls,
}
// Search for virtual calls that load from VPtr and add them to DevirtCalls.
-static void
-findLoadCallsAtConstantOffset(const Module *M,
- SmallVectorImpl<DevirtCallSite> &DevirtCalls,
- Value *VPtr, int64_t Offset) {
+static void findLoadCallsAtConstantOffset(
+ const Module *M, SmallVectorImpl<DevirtCallSite> &DevirtCalls, Value *VPtr,
+ int64_t Offset, const CallInst *CI, DominatorTree &DT) {
for (const Use &U : VPtr->uses()) {
Value *User = U.getUser();
if (isa<BitCastInst>(User)) {
- findLoadCallsAtConstantOffset(M, DevirtCalls, User, Offset);
+ findLoadCallsAtConstantOffset(M, DevirtCalls, User, Offset, CI, DT);
} else if (isa<LoadInst>(User)) {
- findCallsAtConstantOffset(DevirtCalls, nullptr, User, Offset);
+ findCallsAtConstantOffset(DevirtCalls, nullptr, User, Offset, CI, DT);
} else if (auto GEP = dyn_cast<GetElementPtrInst>(User)) {
// Take into account the GEP offset.
if (VPtr == GEP->getPointerOperand() && GEP->hasAllConstantIndices()) {
SmallVector<Value *, 8> Indices(GEP->op_begin() + 1, GEP->op_end());
int64_t GEPOffset = M->getDataLayout().getIndexedOffsetInType(
GEP->getSourceElementType(), Indices);
- findLoadCallsAtConstantOffset(M, DevirtCalls, User, Offset + GEPOffset);
+ findLoadCallsAtConstantOffset(M, DevirtCalls, User, Offset + GEPOffset,
+ CI, DT);
}
}
}
@@ -62,7 +73,8 @@ findLoadCallsAtConstantOffset(const Module *M,
void llvm::findDevirtualizableCallsForTypeTest(
SmallVectorImpl<DevirtCallSite> &DevirtCalls,
- SmallVectorImpl<CallInst *> &Assumes, const CallInst *CI) {
+ SmallVectorImpl<CallInst *> &Assumes, const CallInst *CI,
+ DominatorTree &DT) {
assert(CI->getCalledFunction()->getIntrinsicID() == Intrinsic::type_test);
const Module *M = CI->getParent()->getParent()->getParent();
@@ -79,15 +91,15 @@ void llvm::findDevirtualizableCallsForTypeTest(
// If we found any, search for virtual calls based on %p and add them to
// DevirtCalls.
if (!Assumes.empty())
- findLoadCallsAtConstantOffset(M, DevirtCalls,
- CI->getArgOperand(0)->stripPointerCasts(), 0);
+ findLoadCallsAtConstantOffset(
+ M, DevirtCalls, CI->getArgOperand(0)->stripPointerCasts(), 0, CI, DT);
}
void llvm::findDevirtualizableCallsForTypeCheckedLoad(
SmallVectorImpl<DevirtCallSite> &DevirtCalls,
SmallVectorImpl<Instruction *> &LoadedPtrs,
SmallVectorImpl<Instruction *> &Preds, bool &HasNonCallUses,
- const CallInst *CI) {
+ const CallInst *CI, DominatorTree &DT) {
assert(CI->getCalledFunction()->getIntrinsicID() ==
Intrinsic::type_checked_load);
@@ -114,5 +126,5 @@ void llvm::findDevirtualizableCallsForTypeCheckedLoad(
for (Value *LoadedPtr : LoadedPtrs)
findCallsAtConstantOffset(DevirtCalls, &HasNonCallUses, LoadedPtr,
- Offset->getZExtValue());
+ Offset->getZExtValue(), CI, DT);
}
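As a usage sketch of the updated interface, mirroring what
buildModuleSummaryIndex now does in ModuleSummaryAnalysis.cpp above, a caller
builds a per-function DominatorTree and passes it to
findDevirtualizableCallsForTypeTest. The wrapper function below is hypothetical
and only illustrates the new parameter.

#include "llvm/ADT/SmallVector.h"
#include "llvm/Analysis/TypeMetadataUtils.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/Instructions.h"

using namespace llvm;

// Given an llvm.type.test call inside F, gather the call sites it can
// devirtualize, now filtered by the dominance check.
static void collectDevirtCandidates(Function &F, const CallInst *TypeTestCall) {
  DominatorTree DT(F); // one tree per function, as the summary builder now does
  SmallVector<DevirtCallSite, 4> DevirtCalls;
  SmallVector<CallInst *, 4> Assumes;
  findDevirtualizableCallsForTypeTest(DevirtCalls, Assumes, TypeTestCall, DT);
  // DevirtCalls now holds only virtual calls dominated by TypeTestCall.
}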