author | Matt Arsenault <Matthew.Arsenault@amd.com> | 2019-06-12 14:23:33 +0000 |
---|---|---|
committer | Matt Arsenault <Matthew.Arsenault@amd.com> | 2019-06-12 14:23:33 +0000 |
commit | f29366b1f594f48465c5a2754bcffac6d70fd0b1 (patch) | |
tree | d53fb6d4ddf8b4ec6fa71a065d4fcdde26349649 /llvm/lib/CodeGen/StackProtector.cpp | |
parent | 61f6395fd011267307f73e9d0b80d6ae2c24de84 (diff) | |
StackProtector: Use PointerMayBeCaptured
The pass was using its own, outdated list of possible captures, which at
minimum failed to catch cmpxchg and addrspacecast captures.
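For reference, the generic query that replaces the hand-rolled user walk is a single call into CaptureTracking. A minimal sketch of how a pass might use it (the `addressMayEscape` wrapper is illustrative, not part of this patch):

```cpp
#include "llvm/Analysis/CaptureTracking.h"
#include "llvm/IR/Instructions.h"

using namespace llvm;

// Illustrative wrapper (not part of the patch): ask CaptureTracking whether
// the address of a stack slot can escape. Returning the pointer from the
// function does not count as a capture (ReturnCaptures = false), but any
// store of the pointer does (StoreCaptures = true). Unlike the old manual
// user walk, the generic walk also understands users such as cmpxchg and
// addrspacecast.
static bool addressMayEscape(const AllocaInst *AI) {
  return PointerMayBeCaptured(AI, /*ReturnCaptures=*/false,
                              /*StoreCaptures=*/true);
}
```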
One behavioral change is that any volatile access is now treated as
capturing. The test coverage for this pass is quite inadequate, but this
change required removing the volatile qualifier in the lifetime capture test.
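To observe that behavioral change in isolation, here is a small standalone harness (a sketch of my own, not part of this commit, assuming an LLVM of roughly this vintage with typed pointers) that feeds PointerMayBeCaptured an alloca whose only use is a volatile load:

```cpp
#include "llvm/Analysis/CaptureTracking.h"
#include "llvm/AsmParser/Parser.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Module.h"
#include "llvm/Support/SourceMgr.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

int main() {
  LLVMContext Ctx;
  SMDiagnostic Err;
  // A stack slot whose only use is a volatile load.
  std::unique_ptr<Module> M = parseAssemblyString(
      "define void @f() {\n"
      "  %a = alloca i32\n"
      "  %v = load volatile i32, i32* %a\n"
      "  ret void\n"
      "}\n",
      Err, Ctx);
  if (!M)
    return 1;

  auto *AI = cast<AllocaInst>(&M->getFunction("f")->getEntryBlock().front());
  // With the arguments used by this commit, the volatile access is reported
  // as a capture, so stack-protector-strong would treat %a as address-taken.
  bool Captured = PointerMayBeCaptured(AI, /*ReturnCaptures=*/false,
                                       /*StoreCaptures=*/true);
  outs() << (Captured ? "captured\n" : "not captured\n");
  return 0;
}
```

The old HasAddressTaken walk would not have flagged this case, since a plain load of the slot never appeared in its list of capturing users; that is why the lifetime capture test had to drop its volatile qualifier.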
Also fixes some infrastructure issues to allow running just the IR
pass.
Fixes bug 42238.
llvm-svn: 363169
Diffstat (limited to 'llvm/lib/CodeGen/StackProtector.cpp')
-rw-r--r-- | llvm/lib/CodeGen/StackProtector.cpp | 39 |
1 file changed, 4 insertions, 35 deletions
```diff
diff --git a/llvm/lib/CodeGen/StackProtector.cpp b/llvm/lib/CodeGen/StackProtector.cpp
index dc215a033fc..809960c7fdf 100644
--- a/llvm/lib/CodeGen/StackProtector.cpp
+++ b/llvm/lib/CodeGen/StackProtector.cpp
@@ -17,6 +17,7 @@
 #include "llvm/ADT/SmallPtrSet.h"
 #include "llvm/ADT/Statistic.h"
 #include "llvm/Analysis/BranchProbabilityInfo.h"
+#include "llvm/Analysis/CaptureTracking.h"
 #include "llvm/Analysis/EHPersonalities.h"
 #include "llvm/Analysis/OptimizationRemarkEmitter.h"
 #include "llvm/CodeGen/Passes.h"
@@ -156,40 +157,6 @@ bool StackProtector::ContainsProtectableArray(Type *Ty, bool &IsLarge,
   return NeedsProtector;
 }
 
-bool StackProtector::HasAddressTaken(const Instruction *AI) {
-  for (const User *U : AI->users()) {
-    if (const StoreInst *SI = dyn_cast<StoreInst>(U)) {
-      if (AI == SI->getValueOperand())
-        return true;
-    } else if (const PtrToIntInst *SI = dyn_cast<PtrToIntInst>(U)) {
-      if (AI == SI->getOperand(0))
-        return true;
-    } else if (const CallInst *CI = dyn_cast<CallInst>(U)) {
-      // Ignore intrinsics that are not calls. TODO: Use isLoweredToCall().
-      if (!isa<DbgInfoIntrinsic>(CI) && !CI->isLifetimeStartOrEnd())
-        return true;
-    } else if (isa<InvokeInst>(U)) {
-      return true;
-    } else if (const SelectInst *SI = dyn_cast<SelectInst>(U)) {
-      if (HasAddressTaken(SI))
-        return true;
-    } else if (const PHINode *PN = dyn_cast<PHINode>(U)) {
-      // Keep track of what PHI nodes we have already visited to ensure
-      // they are only visited once.
-      if (VisitedPHIs.insert(PN).second)
-        if (HasAddressTaken(PN))
-          return true;
-    } else if (const GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(U)) {
-      if (HasAddressTaken(GEP))
-        return true;
-    } else if (const BitCastInst *BI = dyn_cast<BitCastInst>(U)) {
-      if (HasAddressTaken(BI))
-        return true;
-    }
-  }
-  return false;
-}
-
 /// Search for the first call to the llvm.stackprotector intrinsic and return it
 /// if present.
 static const CallInst *findStackProtectorIntrinsic(Function &F) {
@@ -297,7 +264,9 @@ bool StackProtector::RequiresStackProtector() {
         continue;
       }
 
-      if (Strong && HasAddressTaken(AI)) {
+      if (Strong && PointerMayBeCaptured(AI,
+                                         /* ReturnCaptures */ false,
+                                         /* StoreCaptures */ true)) {
         ++NumAddrTaken;
         Layout.insert(std::make_pair(AI, MachineFrameInfo::SSPLK_AddrOf));
         ORE.emit([&]() {
```