author     John McCall <rjmccall@apple.com>    2019-08-14 03:54:05 +0000
committer  John McCall <rjmccall@apple.com>    2019-08-14 03:54:05 +0000
commit     2133feec933ea7311ee74ee39a5e6a0bcfaef822 (patch)
tree       f4f4d1898d9d66e6bfcad098aea6372164f5a47d /llvm/lib/Transforms
parent     dc4668e5cf91029f3044114903c286c10e1ffdea (diff)
Support swifterror in coroutine lowering.
The support for swifterror allocas should work in all lowerings. The support for swifterror arguments only really works in a lowering with prototypes where you can ensure that the prototype also has a swifterror argument; I'm not really sure how it could possibly be made to work in the switch lowering.

llvm-svn: 368795
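For context, the two forms the commit message distinguishes can be sketched in IR roughly as below. This is illustrative only and not part of the commit; the names @may_fail, %swift.error, and the function names are made up for the example.

; Case 1: a swifterror alloca inside the coroutine body. The patch handles
; this in every lowering by rewriting its uses to get/set helper calls and
; then promoting the alloca.
%swift.error = type opaque

declare void @may_fail(%swift.error** swifterror)

define void @coro_with_alloca() {
entry:
  %err.slot = alloca swifterror %swift.error*, align 8
  store %swift.error* null, %swift.error** %err.slot
  call void @may_fail(%swift.error** swifterror %err.slot)
  ret void
}

; Case 2: a swifterror argument on the coroutine itself. Per the message,
; this only works when the lowering's prototype also carries a swifterror
; parameter; it is unclear how the switch lowering could support it.
define void @coro_with_arg(%swift.error** swifterror %err) {
entry:
  call void @may_fail(%swift.error** swifterror %err)
  ret void
}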
Diffstat (limited to 'llvm/lib/Transforms')
-rw-r--r--  llvm/lib/Transforms/Coroutines/CoroFrame.cpp   | 166
-rw-r--r--  llvm/lib/Transforms/Coroutines/CoroInternal.h  |   1
-rw-r--r--  llvm/lib/Transforms/Coroutines/CoroSplit.cpp   |  71
3 files changed, 238 insertions, 0 deletions
diff --git a/llvm/lib/Transforms/Coroutines/CoroFrame.cpp b/llvm/lib/Transforms/Coroutines/CoroFrame.cpp
index 52c167c9f71..45677ab8a74 100644
--- a/llvm/lib/Transforms/Coroutines/CoroFrame.cpp
+++ b/llvm/lib/Transforms/Coroutines/CoroFrame.cpp
@@ -28,6 +28,7 @@
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/circular_raw_ostream.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
+#include "llvm/Transforms/Utils/PromoteMemToReg.h"
using namespace llvm;
@@ -1110,11 +1111,176 @@ static Instruction *lowerNonLocalAlloca(CoroAllocaAllocInst *AI,
return cast<Instruction>(Alloc);
}
+/// Get the current swifterror value.
+static Value *emitGetSwiftErrorValue(IRBuilder<> &Builder, Type *ValueTy,
+ coro::Shape &Shape) {
+ // Make a fake function pointer as a sort of intrinsic.
+ auto FnTy = FunctionType::get(ValueTy, {}, false);
+ auto Fn = ConstantPointerNull::get(FnTy->getPointerTo());
+
+ auto Call = Builder.CreateCall(Fn, {});
+ Shape.SwiftErrorOps.push_back(Call);
+
+ return Call;
+}
+
+/// Set the given value as the current swifterror value.
+///
+/// Returns a slot that can be used as a swifterror slot.
+static Value *emitSetSwiftErrorValue(IRBuilder<> &Builder, Value *V,
+ coro::Shape &Shape) {
+ // Make a fake function pointer as a sort of intrinsic.
+ auto FnTy = FunctionType::get(V->getType()->getPointerTo(),
+ {V->getType()}, false);
+ auto Fn = ConstantPointerNull::get(FnTy->getPointerTo());
+
+ auto Call = Builder.CreateCall(Fn, { V });
+ Shape.SwiftErrorOps.push_back(Call);
+
+ return Call;
+}
+
+/// Set the swifterror value from the given alloca before a call,
+/// then put it back in the alloca afterwards.
+///
+/// Returns an address that will stand in for the swifterror slot
+/// until splitting.
+static Value *emitSetAndGetSwiftErrorValueAround(Instruction *Call,
+ AllocaInst *Alloca,
+ coro::Shape &Shape) {
+ auto ValueTy = Alloca->getAllocatedType();
+ IRBuilder<> Builder(Call);
+
+ // Load the current value from the alloca and set it as the
+ // swifterror value.
+ auto ValueBeforeCall = Builder.CreateLoad(ValueTy, Alloca);
+ auto Addr = emitSetSwiftErrorValue(Builder, ValueBeforeCall, Shape);
+
+ // Move to after the call. Since swifterror only has a guaranteed
+ // value on normal exits, we can ignore implicit and explicit unwind
+ // edges.
+ if (isa<CallInst>(Call)) {
+ Builder.SetInsertPoint(Call->getNextNode());
+ } else {
+ auto Invoke = cast<InvokeInst>(Call);
+ Builder.SetInsertPoint(Invoke->getNormalDest()->getFirstNonPHIOrDbg());
+ }
+
+ // Get the current swifterror value and store it to the alloca.
+ auto ValueAfterCall = emitGetSwiftErrorValue(Builder, ValueTy, Shape);
+ Builder.CreateStore(ValueAfterCall, Alloca);
+
+ return Addr;
+}
+
+/// Eliminate a formerly-swifterror alloca by inserting the get/set
+/// intrinsics and attempting to MemToReg the alloca away.
+static void eliminateSwiftErrorAlloca(Function &F, AllocaInst *Alloca,
+ coro::Shape &Shape) {
+ for (auto UI = Alloca->use_begin(), UE = Alloca->use_end(); UI != UE; ) {
+ // We're likely changing the use list, so use a mutation-safe
+ // iteration pattern.
+ auto &Use = *UI;
+ ++UI;
+
+ // swifterror values can only be used in very specific ways.
+ // We take advantage of that here.
+ auto User = Use.getUser();
+ if (isa<LoadInst>(User) || isa<StoreInst>(User))
+ continue;
+
+ assert(isa<CallInst>(User) || isa<InvokeInst>(User));
+ auto Call = cast<Instruction>(User);
+
+ auto Addr = emitSetAndGetSwiftErrorValueAround(Call, Alloca, Shape);
+
+ // Use the returned slot address as the call argument.
+ Use.set(Addr);
+ }
+
+ // All the uses should be loads and stores now.
+ assert(isAllocaPromotable(Alloca));
+}
+
+/// "Eliminate" a swifterror argument by reducing it to the alloca case
+/// and then loading and storing in the prologue and epilogue.
+///
+/// The argument keeps the swifterror flag.
+static void eliminateSwiftErrorArgument(Function &F, Argument &Arg,
+ coro::Shape &Shape,
+ SmallVectorImpl<AllocaInst*> &AllocasToPromote) {
+ IRBuilder<> Builder(F.getEntryBlock().getFirstNonPHIOrDbg());
+
+ auto ArgTy = cast<PointerType>(Arg.getType());
+ auto ValueTy = ArgTy->getElementType();
+
+ // Reduce to the alloca case:
+
+ // Create an alloca and replace all uses of the arg with it.
+ auto Alloca = Builder.CreateAlloca(ValueTy, ArgTy->getAddressSpace());
+ Arg.replaceAllUsesWith(Alloca);
+
+ // Set an initial value in the alloca. swifterror is always null on entry.
+ auto InitialValue = Constant::getNullValue(ValueTy);
+ Builder.CreateStore(InitialValue, Alloca);
+
+ // Find all the suspends in the function and save and restore around them.
+ for (auto Suspend : Shape.CoroSuspends) {
+ (void) emitSetAndGetSwiftErrorValueAround(Suspend, Alloca, Shape);
+ }
+
+ // Find all the coro.ends in the function and restore the error value.
+ for (auto End : Shape.CoroEnds) {
+ Builder.SetInsertPoint(End);
+ auto FinalValue = Builder.CreateLoad(ValueTy, Alloca);
+ (void) emitSetSwiftErrorValue(Builder, FinalValue, Shape);
+ }
+
+ // Now we can use the alloca logic.
+ AllocasToPromote.push_back(Alloca);
+ eliminateSwiftErrorAlloca(F, Alloca, Shape);
+}
+
+/// Eliminate all problematic uses of swifterror arguments and allocas
+/// from the function. We'll fix them up later when splitting the function.
+static void eliminateSwiftError(Function &F, coro::Shape &Shape) {
+ SmallVector<AllocaInst*, 4> AllocasToPromote;
+
+ // Look for a swifterror argument.
+ for (auto &Arg : F.args()) {
+ if (!Arg.hasSwiftErrorAttr()) continue;
+
+ eliminateSwiftErrorArgument(F, Arg, Shape, AllocasToPromote);
+ break;
+ }
+
+ // Look for swifterror allocas.
+ for (auto &Inst : F.getEntryBlock()) {
+ auto Alloca = dyn_cast<AllocaInst>(&Inst);
+ if (!Alloca || !Alloca->isSwiftError()) continue;
+
+ // Clear the swifterror flag.
+ Alloca->setSwiftError(false);
+
+ AllocasToPromote.push_back(Alloca);
+ eliminateSwiftErrorAlloca(F, Alloca, Shape);
+ }
+
+ // If we have any allocas to promote, compute a dominator tree and
+ // promote them en masse.
+ if (!AllocasToPromote.empty()) {
+ DominatorTree DT(F);
+ PromoteMemToReg(AllocasToPromote, DT);
+ }
+}
+
void coro::buildCoroutineFrame(Function &F, Shape &Shape) {
// Lower coro.dbg.declare to coro.dbg.value, since we are going to rewrite
// access to local variables.
LowerDbgDeclare(F);
+ eliminateSwiftError(F, Shape);
+
if (Shape.ABI == coro::ABI::Switch &&
Shape.SwitchLowering.PromiseAlloca) {
Shape.getSwitchCoroId()->clearPromise();
diff --git a/llvm/lib/Transforms/Coroutines/CoroInternal.h b/llvm/lib/Transforms/Coroutines/CoroInternal.h
index d2348057c24..98affcc5f44 100644
--- a/llvm/lib/Transforms/Coroutines/CoroInternal.h
+++ b/llvm/lib/Transforms/Coroutines/CoroInternal.h
@@ -89,6 +89,7 @@ struct LLVM_LIBRARY_VISIBILITY Shape {
SmallVector<CoroEndInst *, 4> CoroEnds;
SmallVector<CoroSizeInst *, 2> CoroSizes;
SmallVector<AnyCoroSuspendInst *, 4> CoroSuspends;
+ SmallVector<CallInst*, 2> SwiftErrorOps;
// Field indexes for special fields in the switch lowering.
struct SwitchFieldIndex {
diff --git a/llvm/lib/Transforms/Coroutines/CoroSplit.cpp b/llvm/lib/Transforms/Coroutines/CoroSplit.cpp
index 1183352271c..db318d858da 100644
--- a/llvm/lib/Transforms/Coroutines/CoroSplit.cpp
+++ b/llvm/lib/Transforms/Coroutines/CoroSplit.cpp
@@ -97,6 +97,7 @@ private:
ValueToValueMapTy VMap;
IRBuilder<> Builder;
Value *NewFramePtr = nullptr;
+ Value *SwiftErrorSlot = nullptr;
/// The active suspend instruction; meaningful only for continuation ABIs.
AnyCoroSuspendInst *ActiveSuspend = nullptr;
@@ -147,6 +148,7 @@ private:
void replaceRetconSuspendUses();
void replaceCoroSuspends();
void replaceCoroEnds();
+ void replaceSwiftErrorOps();
void handleFinalSuspend();
void maybeFreeContinuationStorage();
};
@@ -490,6 +492,68 @@ void CoroCloner::replaceCoroEnds() {
}
}
+static void replaceSwiftErrorOps(Function &F, coro::Shape &Shape,
+ ValueToValueMapTy *VMap) {
+ Value *CachedSlot = nullptr;
+ auto getSwiftErrorSlot = [&](Type *ValueTy) -> Value * {
+ if (CachedSlot) {
+ assert(CachedSlot->getType()->getPointerElementType() == ValueTy &&
+ "multiple swifterror slots in function with different types");
+ return CachedSlot;
+ }
+
+ // Check if the function has a swifterror argument.
+ for (auto &Arg : F.args()) {
+ if (Arg.isSwiftError()) {
+ CachedSlot = &Arg;
+ assert(Arg.getType()->getPointerElementType() == ValueTy &&
+ "swifterror argument does not have expected type");
+ return &Arg;
+ }
+ }
+
+ // Create a swifterror alloca.
+ IRBuilder<> Builder(F.getEntryBlock().getFirstNonPHIOrDbg());
+ auto Alloca = Builder.CreateAlloca(ValueTy);
+ Alloca->setSwiftError(true);
+
+ CachedSlot = Alloca;
+ return Alloca;
+ };
+
+ for (CallInst *Op : Shape.SwiftErrorOps) {
+ auto MappedOp = VMap ? cast<CallInst>((*VMap)[Op]) : Op;
+ IRBuilder<> Builder(MappedOp);
+
+ // If there are no arguments, this is a 'get' operation.
+ Value *MappedResult;
+ if (Op->getNumArgOperands() == 0) {
+ auto ValueTy = Op->getType();
+ auto Slot = getSwiftErrorSlot(ValueTy);
+ MappedResult = Builder.CreateLoad(ValueTy, Slot);
+ } else {
+ assert(Op->getNumArgOperands() == 1);
+ auto Value = MappedOp->getArgOperand(0);
+ auto ValueTy = Value->getType();
+ auto Slot = getSwiftErrorSlot(ValueTy);
+ Builder.CreateStore(Value, Slot);
+ MappedResult = Slot;
+ }
+
+ MappedOp->replaceAllUsesWith(MappedResult);
+ MappedOp->eraseFromParent();
+ }
+
+ // If we're updating the original function, we've invalidated SwiftErrorOps.
+ if (VMap == nullptr) {
+ Shape.SwiftErrorOps.clear();
+ }
+}
+
+void CoroCloner::replaceSwiftErrorOps() {
+ ::replaceSwiftErrorOps(*NewF, Shape, &VMap);
+}
+
void CoroCloner::replaceEntryBlock() {
// In the original function, the AllocaSpillBlock is a block immediately
// following the allocation of the frame object which defines GEPs for
@@ -691,6 +755,9 @@ void CoroCloner::create() {
// Handle suspends.
replaceCoroSuspends();
+ // Handle swifterror.
+ replaceSwiftErrorOps();
+
// Remove coro.end intrinsics.
replaceCoroEnds();
@@ -1364,6 +1431,10 @@ static void splitCoroutine(Function &F, CallGraph &CG, CallGraphSCC &SCC) {
splitCoroutine(F, Shape, Clones);
}
+ // Replace all the swifterror operations in the original function.
+ // This invalidates SwiftErrorOps in the Shape.
+ replaceSwiftErrorOps(F, Shape, nullptr);
+
removeCoroEnds(Shape, &CG);
postSplitCleanup(F);