author     Chandler Carruth <chandlerc@gmail.com>   2019-01-07 05:42:51 +0000
committer  Chandler Carruth <chandlerc@gmail.com>   2019-01-07 05:42:51 +0000
commit     363ac6837427ffc6adcc68c44788bb4d92d52873 (patch)
tree       1f5211bde4f720eed89db8e62ee16a4be663c2a8 /llvm/lib/Analysis/AliasSetTracker.cpp
parent     f6f134e4d44ca8db242e168de6da7eb68e9cf76e (diff)
[CallSite removal] Migrate all Alias Analysis APIs to use the newly
minted `CallBase` class instead of the `CallSite` wrapper.
This moves the largest interwoven collection of APIs that traffic in
`CallSite`s. While a handful of these could have been migrated with
a shallower change, by converting from a `CallSite` to a `CallBase` at
the API boundary, it hardly seemed worth it. Most of the APIs needed to
migrate together because of the complex interplay of AA APIs and the
fact that converting from a `CallBase` to a `CallSite` isn't free in its
current implementation.
Out-of-tree users of these APIs can fairly reliably migrate with some
combination of `.getInstruction()` on the `CallSite` instance and
casting the resulting pointer. The most generic form will look like `CS`
-> `cast_or_null<CallBase>(CS.getInstruction())` but in most cases there
is a more elegant migration. Hopefully, this migrates enough APIs for
users to fully move from `CallSite` to the base class. All of the
in-tree users were easily migrated in that fashion.
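For illustration, a minimal sketch of that recipe as it might appear in an out-of-tree pass; the helper `analyzeCall` and its body are invented, and only the `cast_or_null<CallBase>` pattern comes from this commit:

#include "llvm/IR/CallSite.h"   // legacy wrapper being migrated away from
#include "llvm/IR/InstrTypes.h" // CallBase
#include "llvm/Support/Casting.h"

using namespace llvm;

// Hypothetical out-of-tree helper that still receives a CallSite.
void analyzeCall(ImmutableCallSite CS) {
  // The generic migration recipe: unwrap the underlying instruction and
  // cast it. cast_or_null tolerates an empty (null) CallSite.
  const auto *CB = cast_or_null<CallBase>(CS.getInstruction());
  if (!CB)
    return;
  // From here on, use the CallBase-taking APIs directly.
  (void)CB->onlyAccessesArgMemory();
}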
Thanks to Saleem for the review!
Differential Revision: https://reviews.llvm.org/D55641
llvm-svn: 350503
Diffstat (limited to 'llvm/lib/Analysis/AliasSetTracker.cpp')
-rw-r--r--   llvm/lib/Analysis/AliasSetTracker.cpp   78
1 file changed, 39 insertions, 39 deletions
diff --git a/llvm/lib/Analysis/AliasSetTracker.cpp b/llvm/lib/Analysis/AliasSetTracker.cpp
index c152b0ddeca..f6ad704cc91 100644
--- a/llvm/lib/Analysis/AliasSetTracker.cpp
+++ b/llvm/lib/Analysis/AliasSetTracker.cpp
@@ -16,7 +16,6 @@
 #include "llvm/Analysis/GuardUtils.h"
 #include "llvm/Analysis/MemoryLocation.h"
 #include "llvm/Config/llvm-config.h"
-#include "llvm/IR/CallSite.h"
 #include "llvm/IR/Constants.h"
 #include "llvm/IR/DataLayout.h"
 #include "llvm/IR/Function.h"
@@ -236,7 +235,8 @@ bool AliasSet::aliasesUnknownInst(const Instruction *Inst,
   for (unsigned i = 0, e = UnknownInsts.size(); i != e; ++i) {
     if (auto *UnknownInst = getUnknownInst(i)) {
-      ImmutableCallSite C1(UnknownInst), C2(Inst);
+      const auto *C1 = dyn_cast<CallBase>(UnknownInst);
+      const auto *C2 = dyn_cast<CallBase>(Inst);
       if (!C1 || !C2 || isModOrRefSet(AA.getModRefInfo(C1, C2)) ||
           isModOrRefSet(AA.getModRefInfo(C2, C1)))
         return true;
@@ -446,44 +446,44 @@ void AliasSetTracker::add(Instruction *I) {
     return add(MTI);
 
   // Handle all calls with known mod/ref sets genericall
-  CallSite CS(I);
-  if (CS && CS.onlyAccessesArgMemory()) {
-    auto getAccessFromModRef = [](ModRefInfo MRI) {
-      if (isRefSet(MRI) && isModSet(MRI))
-        return AliasSet::ModRefAccess;
-      else if (isModSet(MRI))
-        return AliasSet::ModAccess;
-      else if (isRefSet(MRI))
-        return AliasSet::RefAccess;
-      else
-        return AliasSet::NoAccess;
-
-    };
-
-    ModRefInfo CallMask = createModRefInfo(AA.getModRefBehavior(CS));
-
-    // Some intrinsics are marked as modifying memory for control flow
-    // modelling purposes, but don't actually modify any specific memory
-    // location.
-    using namespace PatternMatch;
-    if (I->use_empty() && match(I, m_Intrinsic<Intrinsic::invariant_start>()))
-      CallMask = clearMod(CallMask);
-
-    for (auto AI = CS.arg_begin(), AE = CS.arg_end(); AI != AE; ++AI) {
-      const Value *Arg = *AI;
-      if (!Arg->getType()->isPointerTy())
-        continue;
-      unsigned ArgIdx = std::distance(CS.arg_begin(), AI);
-      MemoryLocation ArgLoc = MemoryLocation::getForArgument(CS, ArgIdx,
-                                                             nullptr);
-      ModRefInfo ArgMask = AA.getArgModRefInfo(CS, ArgIdx);
-      ArgMask = intersectModRef(CallMask, ArgMask);
-      if (!isNoModRef(ArgMask))
-        addPointer(ArgLoc, getAccessFromModRef(ArgMask));
+  if (auto *Call = dyn_cast<CallBase>(I))
+    if (Call->onlyAccessesArgMemory()) {
+      auto getAccessFromModRef = [](ModRefInfo MRI) {
+        if (isRefSet(MRI) && isModSet(MRI))
+          return AliasSet::ModRefAccess;
+        else if (isModSet(MRI))
+          return AliasSet::ModAccess;
+        else if (isRefSet(MRI))
+          return AliasSet::RefAccess;
+        else
+          return AliasSet::NoAccess;
+      };
+
+      ModRefInfo CallMask = createModRefInfo(AA.getModRefBehavior(Call));
+
+      // Some intrinsics are marked as modifying memory for control flow
+      // modelling purposes, but don't actually modify any specific memory
+      // location.
+      using namespace PatternMatch;
+      if (Call->use_empty() &&
+          match(Call, m_Intrinsic<Intrinsic::invariant_start>()))
+        CallMask = clearMod(CallMask);
+
+      for (auto IdxArgPair : enumerate(Call->args())) {
+        int ArgIdx = IdxArgPair.index();
+        const Value *Arg = IdxArgPair.value();
+        if (!Arg->getType()->isPointerTy())
+          continue;
+        MemoryLocation ArgLoc =
+            MemoryLocation::getForArgument(Call, ArgIdx, nullptr);
+        ModRefInfo ArgMask = AA.getArgModRefInfo(Call, ArgIdx);
+        ArgMask = intersectModRef(CallMask, ArgMask);
+        if (!isNoModRef(ArgMask))
+          addPointer(ArgLoc, getAccessFromModRef(ArgMask));
+      }
+      return;
     }
-    return;
-  }
-
   return addUnknown(I);
 }
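A side note on the rewritten argument loop: it uses `llvm::enumerate` from `llvm/ADT/STLExtras.h` to pair each argument with its index, rather than the old `std::distance` bookkeeping over `arg_begin()`. A standalone sketch of that idiom, with the vector and its contents invented purely for illustration:

#include "llvm/ADT/STLExtras.h"
#include <cstdio>
#include <vector>

int main() {
  std::vector<const char *> Args = {"a", "b", "c"};
  // enumerate() yields (index, value) pairs, so the loop body gets the
  // element index without a separate counter or std::distance call.
  for (auto IdxArgPair : llvm::enumerate(Args))
    std::printf("%zu: %s\n", IdxArgPair.index(), IdxArgPair.value());
  return 0;
}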