Diffstat (limited to 'llvm/lib/CodeGen')
-rw-r--r--  llvm/lib/CodeGen/SelectionDAG/FastISel.cpp              |  13
-rw-r--r--  llvm/lib/CodeGen/SelectionDAG/FunctionLoweringInfo.cpp  |  32
-rw-r--r--  llvm/lib/CodeGen/SelectionDAG/SelectionDAGBuilder.cpp   |  75
-rw-r--r--  llvm/lib/CodeGen/SelectionDAG/SelectionDAGISel.cpp      | 216
4 files changed, 184 insertions(+), 152 deletions(-)
diff --git a/llvm/lib/CodeGen/SelectionDAG/FastISel.cpp b/llvm/lib/CodeGen/SelectionDAG/FastISel.cpp
index a246f12e96a..4ca22497703 100644
--- a/llvm/lib/CodeGen/SelectionDAG/FastISel.cpp
+++ b/llvm/lib/CodeGen/SelectionDAG/FastISel.cpp
@@ -1315,15 +1315,6 @@ bool FastISel::selectBitCast(const User *I) {
return true;
}
-// Return true if we should copy from swift error to the final vreg as specified
-// by SwiftErrorWorklist.
-static bool shouldCopySwiftErrorsToFinalVRegs(const TargetLowering &TLI,
- FunctionLoweringInfo &FuncInfo) {
- if (!TLI.supportSwiftError())
- return false;
- return FuncInfo.SwiftErrorWorklist.count(FuncInfo.MBB);
-}
-
// Remove local value instructions starting from the instruction after
// SavedLastLocalValue to the current function insert point.
void FastISel::removeDeadLocalValueCode(MachineInstr *SavedLastLocalValue)
@@ -1348,10 +1339,6 @@ bool FastISel::selectInstruction(const Instruction *I) {
// Just before the terminator instruction, insert instructions to
// feed PHI nodes in successor blocks.
if (isa<TerminatorInst>(I)) {
- // If we need to materialize any vreg from worklist, we bail out of
- // FastISel.
- if (shouldCopySwiftErrorsToFinalVRegs(TLI, FuncInfo))
- return false;
if (!handlePHINodesInSuccessorBlocks(I->getParent())) {
// PHI node handling may have generated local value instructions,
// even though it failed to handle all PHI nodes.
diff --git a/llvm/lib/CodeGen/SelectionDAG/FunctionLoweringInfo.cpp b/llvm/lib/CodeGen/SelectionDAG/FunctionLoweringInfo.cpp
index 87c968a85e9..3d27f5df205 100644
--- a/llvm/lib/CodeGen/SelectionDAG/FunctionLoweringInfo.cpp
+++ b/llvm/lib/CodeGen/SelectionDAG/FunctionLoweringInfo.cpp
@@ -596,18 +596,26 @@ void llvm::AddLandingPadInfo(const LandingPadInst &I, MachineModuleInfo &MMI,
}
}
-unsigned FunctionLoweringInfo::findSwiftErrorVReg(const MachineBasicBlock *MBB,
- const Value* Val) const {
- // Find the index in SwiftErrorVals.
- SwiftErrorValues::const_iterator I = find(SwiftErrorVals, Val);
- assert(I != SwiftErrorVals.end() && "Can't find value in SwiftErrorVals");
- return SwiftErrorMap.lookup(MBB)[I - SwiftErrorVals.begin()];
+unsigned
+FunctionLoweringInfo::getOrCreateSwiftErrorVReg(const MachineBasicBlock *MBB,
+ const Value *Val) {
+ auto Key = std::make_pair(MBB, Val);
+ auto It = SwiftErrorVRegDefMap.find(Key);
+ // If this is the first use of this swifterror value in this basic block,
+ // create a new virtual register.
+ // After we processed all basic blocks we will satisfy this "upwards exposed
+ // use" by inserting a copy or phi at the beginning of this block.
+ if (It == SwiftErrorVRegDefMap.end()) {
+ auto &DL = MF->getDataLayout();
+ const TargetRegisterClass *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
+ auto VReg = MF->getRegInfo().createVirtualRegister(RC);
+ SwiftErrorVRegDefMap[Key] = VReg;
+ SwiftErrorVRegUpwardsUse[Key] = VReg;
+ return VReg;
+ } else return It->second;
}
-void FunctionLoweringInfo::setSwiftErrorVReg(const MachineBasicBlock *MBB,
- const Value* Val, unsigned VReg) {
- // Find the index in SwiftErrorVals.
- SwiftErrorValues::iterator I = find(SwiftErrorVals, Val);
- assert(I != SwiftErrorVals.end() && "Can't find value in SwiftErrorVals");
- SwiftErrorMap[MBB][I - SwiftErrorVals.begin()] = VReg;
+void FunctionLoweringInfo::setCurrentSwiftErrorVReg(
+ const MachineBasicBlock *MBB, const Value *Val, unsigned VReg) {
+ SwiftErrorVRegDefMap[std::make_pair(MBB, Val)] = VReg;
}
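
The FunctionLoweringInfo.h side of this change is not included in the diff. A minimal sketch, assuming the member declarations implied by the .cpp changes above (names are taken from the code in this patch, but the exact types, initializers, and surrounding class layout are guessed):

    // Hypothetical sketch of the FunctionLoweringInfo members the new code
    // relies on; the real declarations live in FunctionLoweringInfo.h, which
    // is not part of this diff.
    #include "llvm/ADT/DenseMap.h"
    #include "llvm/ADT/SmallVector.h"
    #include "llvm/CodeGen/MachineBasicBlock.h"
    #include "llvm/IR/Value.h"
    using namespace llvm;

    class FunctionLoweringInfo {
    public:
      // The unique swifterror argument of the current function, if any.
      const Value *SwiftErrorArg = nullptr;

      // All swifterror values in the function: the swifterror argument plus
      // any swifterror allocas (this member predates the patch).
      SmallVector<const Value *, 1> SwiftErrorVals;

      // Virtual register holding a swifterror value at the end of a basic
      // block (its "downward exposed def"), keyed by (block, value).
      DenseMap<std::pair<const MachineBasicBlock *, const Value *>, unsigned>
          SwiftErrorVRegDefMap;

      // Virtual registers created for a use that appears before any def in
      // the same block ("upwards exposed uses"); they are satisfied later by
      // a copy or PHI inserted in propagateSwiftErrorVRegs.
      DenseMap<std::pair<const MachineBasicBlock *, const Value *>, unsigned>
          SwiftErrorVRegUpwardsUse;

      unsigned getOrCreateSwiftErrorVReg(const MachineBasicBlock *MBB,
                                         const Value *Val);
      void setCurrentSwiftErrorVReg(const MachineBasicBlock *MBB,
                                    const Value *Val, unsigned VReg);
    };
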
diff --git a/llvm/lib/CodeGen/SelectionDAG/SelectionDAGBuilder.cpp b/llvm/lib/CodeGen/SelectionDAG/SelectionDAGBuilder.cpp
index 5a4d49d9368..4ae94e0befa 100644
--- a/llvm/lib/CodeGen/SelectionDAG/SelectionDAGBuilder.cpp
+++ b/llvm/lib/CodeGen/SelectionDAG/SelectionDAGBuilder.cpp
@@ -930,46 +930,9 @@ SDValue SelectionDAGBuilder::getControlRoot() {
return Root;
}
-/// Copy swift error to the final virtual register at end of a basic block, as
-/// specified by SwiftErrorWorklist, if necessary.
-static void copySwiftErrorsToFinalVRegs(SelectionDAGBuilder &SDB) {
- const TargetLowering &TLI = SDB.DAG.getTargetLoweringInfo();
- if (!TLI.supportSwiftError())
- return;
-
- if (!SDB.FuncInfo.SwiftErrorWorklist.count(SDB.FuncInfo.MBB))
- return;
-
- // Go through entries in SwiftErrorWorklist, and create copy as necessary.
- FunctionLoweringInfo::SwiftErrorVRegs &WorklistEntry =
- SDB.FuncInfo.SwiftErrorWorklist[SDB.FuncInfo.MBB];
- FunctionLoweringInfo::SwiftErrorVRegs &MapEntry =
- SDB.FuncInfo.SwiftErrorMap[SDB.FuncInfo.MBB];
- for (unsigned I = 0, E = WorklistEntry.size(); I < E; I++) {
- unsigned WorkReg = WorklistEntry[I];
-
- // Find the swifterror virtual register for the value in SwiftErrorMap.
- unsigned MapReg = MapEntry[I];
- assert(TargetRegisterInfo::isVirtualRegister(MapReg) &&
- "Entries in SwiftErrorMap should be virtual registers");
-
- if (WorkReg == MapReg)
- continue;
-
- // Create copy from SwiftErrorMap to SwiftWorklist.
- auto &DL = SDB.DAG.getDataLayout();
- SDValue CopyNode = SDB.DAG.getCopyToReg(
- SDB.getRoot(), SDB.getCurSDLoc(), WorkReg,
- SDB.DAG.getRegister(MapReg, EVT(TLI.getPointerTy(DL))));
- MapEntry[I] = WorkReg;
- SDB.DAG.setRoot(CopyNode);
- }
-}
-
void SelectionDAGBuilder::visit(const Instruction &I) {
// Set up outgoing PHI node register values before emitting the terminator.
if (isa<TerminatorInst>(&I)) {
- copySwiftErrorsToFinalVRegs(*this);
HandlePHINodesInSuccessorBlocks(I.getParent());
}
@@ -1489,6 +1452,7 @@ void SelectionDAGBuilder::visitRet(const ReturnInst &I) {
const Function *F = I.getParent()->getParent();
if (TLI.supportSwiftError() &&
F->getAttributes().hasAttrSomewhere(Attribute::SwiftError)) {
+ assert(FuncInfo.SwiftErrorArg && "Need a swift error argument");
ISD::ArgFlagsTy Flags = ISD::ArgFlagsTy();
Flags.setSwiftError();
Outs.push_back(ISD::OutputArg(Flags, EVT(TLI.getPointerTy(DL)) /*vt*/,
@@ -1496,7 +1460,8 @@ void SelectionDAGBuilder::visitRet(const ReturnInst &I) {
true /*isfixed*/, 1 /*origidx*/,
0 /*partOffs*/));
// Create SDNode for the swifterror virtual register.
- OutVals.push_back(DAG.getRegister(FuncInfo.SwiftErrorMap[FuncInfo.MBB][0],
+ OutVals.push_back(DAG.getRegister(FuncInfo.getOrCreateSwiftErrorVReg(
+ FuncInfo.MBB, FuncInfo.SwiftErrorArg),
EVT(TLI.getPointerTy(DL))));
}
@@ -3590,7 +3555,7 @@ void SelectionDAGBuilder::visitStoreToSwiftError(const StoreInst &I) {
SDValue CopyNode = DAG.getCopyToReg(getRoot(), getCurSDLoc(), VReg,
SDValue(Src.getNode(), Src.getResNo()));
DAG.setRoot(CopyNode);
- FuncInfo.setSwiftErrorVReg(FuncInfo.MBB, I.getOperand(1), VReg);
+ FuncInfo.setCurrentSwiftErrorVReg(FuncInfo.MBB, I.getOperand(1), VReg);
}
void SelectionDAGBuilder::visitLoadFromSwiftError(const LoadInst &I) {
@@ -3618,9 +3583,9 @@ void SelectionDAGBuilder::visitLoadFromSwiftError(const LoadInst &I) {
"expect a single EVT for swifterror");
// Chain, DL, Reg, VT, Glue or Chain, DL, Reg, VT
- SDValue L = DAG.getCopyFromReg(getRoot(), getCurSDLoc(),
- FuncInfo.findSwiftErrorVReg(FuncInfo.MBB, SV),
- ValueVTs[0]);
+ SDValue L = DAG.getCopyFromReg(
+ getRoot(), getCurSDLoc(),
+ FuncInfo.getOrCreateSwiftErrorVReg(FuncInfo.MBB, SV), ValueVTs[0]);
setValue(&I, L);
}
@@ -5815,9 +5780,9 @@ void SelectionDAGBuilder::LowerCallTo(ImmutableCallSite CS, SDValue Callee,
SwiftErrorVal = V;
// We find the virtual register for the actual swifterror argument.
// Instead of using the Value, we use the virtual register instead.
- Entry.Node = DAG.getRegister(
- FuncInfo.findSwiftErrorVReg(FuncInfo.MBB, V),
- EVT(TLI.getPointerTy(DL)));
+ Entry.Node =
+ DAG.getRegister(FuncInfo.getOrCreateSwiftErrorVReg(FuncInfo.MBB, V),
+ EVT(TLI.getPointerTy(DL)));
}
Args.push_back(Entry);
@@ -5862,7 +5827,7 @@ void SelectionDAGBuilder::LowerCallTo(ImmutableCallSite CS, SDValue Callee,
unsigned VReg = FuncInfo.MF->getRegInfo().createVirtualRegister(RC);
SDValue CopyNode = CLI.DAG.getCopyToReg(Result.second, CLI.DL, VReg, Src);
// We update the virtual register for the actual swifterror argument.
- FuncInfo.setSwiftErrorVReg(FuncInfo.MBB, SwiftErrorVal, VReg);
+ FuncInfo.setCurrentSwiftErrorVReg(FuncInfo.MBB, SwiftErrorVal, VReg);
DAG.setRoot(CopyNode);
}
}
@@ -8119,7 +8084,10 @@ void SelectionDAGISel::LowerArguments(const Function &F) {
// If this argument is unused then remember its value. It is used to generate
// debugging information.
- if (I->use_empty() && NumValues) {
+ bool isSwiftErrorArg =
+ TLI->supportSwiftError() &&
+ F.getAttributes().hasAttribute(Idx, Attribute::SwiftError);
+ if (I->use_empty() && NumValues && !isSwiftErrorArg) {
SDB->setUnusedArgValue(&*I, InVals[i]);
// Also remember any frame index for use in FastISel.
@@ -8133,7 +8101,10 @@ void SelectionDAGISel::LowerArguments(const Function &F) {
MVT PartVT = TLI->getRegisterType(*CurDAG->getContext(), VT);
unsigned NumParts = TLI->getNumRegisters(*CurDAG->getContext(), VT);
- if (!I->use_empty()) {
+ // Even an apparent 'unused' swifterror argument needs to be returned. So
+ // we do generate a copy for it that can be used on return from the
+ // function.
+ if (!I->use_empty() || isSwiftErrorArg) {
Optional<ISD::NodeType> AssertOp;
if (F.getAttributes().hasAttribute(Idx, Attribute::SExt))
AssertOp = ISD::AssertSext;
@@ -8169,12 +8140,12 @@ void SelectionDAGISel::LowerArguments(const Function &F) {
FuncInfo->setArgumentFrameIndex(&*I, FI->getIndex());
}
- // Update SwiftErrorMap.
- if (Res.getOpcode() == ISD::CopyFromReg && TLI->supportSwiftError() &&
- F.getAttributes().hasAttribute(Idx, Attribute::SwiftError)) {
+ // Update the SwiftErrorVRegDefMap.
+ if (Res.getOpcode() == ISD::CopyFromReg && isSwiftErrorArg) {
unsigned Reg = cast<RegisterSDNode>(Res.getOperand(1))->getReg();
if (TargetRegisterInfo::isVirtualRegister(Reg))
- FuncInfo->SwiftErrorMap[FuncInfo->MBB][0] = Reg;
+ FuncInfo->setCurrentSwiftErrorVReg(FuncInfo->MBB,
+ FuncInfo->SwiftErrorArg, Reg);
}
// If this argument is live outside of the entry block, insert a copy from
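
Taken together, the SelectionDAGBuilder changes above follow one pattern per basic block: a read of a swifterror value asks for its current virtual register, and a write installs a new one. The following standalone toy model (plain C++, not LLVM code; all names are invented for illustration, and the per-(block, value) keying is collapsed to a single value) shows that threading:

    #include <cstdio>

    // Toy stand-in for the "current vreg" that getOrCreateSwiftErrorVReg /
    // setCurrentSwiftErrorVReg maintain per (basic block, swifterror value).
    static unsigned NextVReg = 1;
    static unsigned CurrentVReg = 0;

    // A read (visitLoadFromSwiftError, or passing the value to a call) uses
    // the current vreg, inventing one on first use ("upwards exposed use").
    unsigned readSwiftError() {
      if (!CurrentVReg)
        CurrentVReg = NextVReg++;
      return CurrentVReg;
    }

    // A write (visitStoreToSwiftError, or the result of a call that may set
    // the error) installs a fresh vreg as the new current value.
    void writeSwiftError() {
      CurrentVReg = NextVReg++;
    }

    int main() {
      std::printf("load -> vreg %u\n", readSwiftError()); // vreg 1
      writeSwiftError();                                   // store redefines it
      std::printf("load -> vreg %u\n", readSwiftError()); // vreg 2
      std::printf("ret  -> vreg %u\n", readSwiftError()); // return sees last def
      return 0;
    }
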
diff --git a/llvm/lib/CodeGen/SelectionDAG/SelectionDAGISel.cpp b/llvm/lib/CodeGen/SelectionDAG/SelectionDAGISel.cpp
index 3958b7ba0f5..317e7e05535 100644
--- a/llvm/lib/CodeGen/SelectionDAG/SelectionDAGISel.cpp
+++ b/llvm/lib/CodeGen/SelectionDAG/SelectionDAGISel.cpp
@@ -1157,14 +1157,21 @@ static void setupSwiftErrorVals(const Function &Fn, const TargetLowering *TLI,
return;
FuncInfo->SwiftErrorVals.clear();
- FuncInfo->SwiftErrorMap.clear();
- FuncInfo->SwiftErrorWorklist.clear();
+ FuncInfo->SwiftErrorVRegDefMap.clear();
+ FuncInfo->SwiftErrorVRegUpwardsUse.clear();
+ FuncInfo->SwiftErrorArg = nullptr;
// Check if function has a swifterror argument.
+ bool HaveSeenSwiftErrorArg = false;
for (Function::const_arg_iterator AI = Fn.arg_begin(), AE = Fn.arg_end();
AI != AE; ++AI)
- if (AI->hasSwiftErrorAttr())
+ if (AI->hasSwiftErrorAttr()) {
+ assert(!HaveSeenSwiftErrorArg &&
+ "Must have only one swifterror parameter");
+ HaveSeenSwiftErrorArg = true;
+ FuncInfo->SwiftErrorArg = &*AI;
FuncInfo->SwiftErrorVals.push_back(&*AI);
+ }
for (const auto &LLVMBB : Fn)
for (const auto &Inst : LLVMBB) {
@@ -1174,95 +1181,152 @@ static void setupSwiftErrorVals(const Function &Fn, const TargetLowering *TLI,
}
}
-/// For each basic block, merge incoming swifterror values or simply propagate
-/// them. The merged results will be saved in SwiftErrorMap. For predecessors
-/// that are not yet visited, we create virtual registers to hold the swifterror
-/// values and save them in SwiftErrorWorklist.
-static void mergeIncomingSwiftErrors(FunctionLoweringInfo *FuncInfo,
- const TargetLowering *TLI,
- const TargetInstrInfo *TII,
- const BasicBlock *LLVMBB,
- SelectionDAGBuilder *SDB) {
+static void createSwiftErrorEntriesInEntryBlock(FunctionLoweringInfo *FuncInfo,
+ const TargetLowering *TLI,
+ const TargetInstrInfo *TII,
+ const BasicBlock *LLVMBB,
+ SelectionDAGBuilder *SDB) {
if (!TLI->supportSwiftError())
return;
- // We should only do this when we have swifterror parameter or swifterror
+ // We only need to do this when we have a swifterror parameter or swifterror
// alloc.
if (FuncInfo->SwiftErrorVals.empty())
return;
- // At beginning of a basic block, insert PHI nodes or get the virtual
- // register from the only predecessor, and update SwiftErrorMap; if one
- // of the predecessors is not visited, update SwiftErrorWorklist.
- // At end of a basic block, if a block is in SwiftErrorWorklist, insert copy
- // to sync up the virtual register assignment.
-
- // Always create a virtual register for each swifterror value in entry block.
- auto &DL = SDB->DAG.getDataLayout();
- const TargetRegisterClass *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
if (pred_begin(LLVMBB) == pred_end(LLVMBB)) {
- for (unsigned I = 0, E = FuncInfo->SwiftErrorVals.size(); I < E; I++) {
+ auto &DL = FuncInfo->MF->getDataLayout();
+ auto const *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
+ for (const auto *SwiftErrorVal : FuncInfo->SwiftErrorVals) {
+ // We will always generate a copy from the argument. It is always used at
+ // least by the 'return' of the swifterror.
+ if (FuncInfo->SwiftErrorArg && FuncInfo->SwiftErrorArg == SwiftErrorVal)
+ continue;
unsigned VReg = FuncInfo->MF->getRegInfo().createVirtualRegister(RC);
// Assign Undef to Vreg. We construct MI directly to make sure it works
// with FastISel.
- BuildMI(*FuncInfo->MBB, FuncInfo->InsertPt, SDB->getCurDebugLoc(),
- TII->get(TargetOpcode::IMPLICIT_DEF), VReg);
- FuncInfo->SwiftErrorMap[FuncInfo->MBB].push_back(VReg);
+ BuildMI(*FuncInfo->MBB, FuncInfo->MBB->getFirstNonPHI(),
+ SDB->getCurDebugLoc(), TII->get(TargetOpcode::IMPLICIT_DEF),
+ VReg);
+ FuncInfo->setCurrentSwiftErrorVReg(FuncInfo->MBB, SwiftErrorVal, VReg);
}
- return;
}
+}
- if (auto *UniquePred = LLVMBB->getUniquePredecessor()) {
- auto *UniquePredMBB = FuncInfo->MBBMap[UniquePred];
- if (!FuncInfo->SwiftErrorMap.count(UniquePredMBB)) {
- // Update SwiftErrorWorklist with a new virtual register.
- for (unsigned I = 0, E = FuncInfo->SwiftErrorVals.size(); I < E; I++) {
- unsigned VReg = FuncInfo->MF->getRegInfo().createVirtualRegister(RC);
- FuncInfo->SwiftErrorWorklist[UniquePredMBB].push_back(VReg);
- // Propagate the information from the single predecessor.
- FuncInfo->SwiftErrorMap[FuncInfo->MBB].push_back(VReg);
- }
- return;
- }
- // Propagate the information from the single predecessor.
- FuncInfo->SwiftErrorMap[FuncInfo->MBB] =
- FuncInfo->SwiftErrorMap[UniquePredMBB];
+/// Propagate swifterror values through the machine function CFG.
+static void propagateSwiftErrorVRegs(FunctionLoweringInfo *FuncInfo) {
+ auto *TLI = FuncInfo->TLI;
+ if (!TLI->supportSwiftError())
return;
- }
- // For the case of multiple predecessors, update SwiftErrorWorklist.
- // Handle the case where we have two or more predecessors being the same.
- for (const_pred_iterator PI = pred_begin(LLVMBB), PE = pred_end(LLVMBB);
- PI != PE; ++PI) {
- auto *PredMBB = FuncInfo->MBBMap[*PI];
- if (!FuncInfo->SwiftErrorMap.count(PredMBB) &&
- !FuncInfo->SwiftErrorWorklist.count(PredMBB)) {
- for (unsigned I = 0, E = FuncInfo->SwiftErrorVals.size(); I < E; I++) {
- unsigned VReg = FuncInfo->MF->getRegInfo().createVirtualRegister(RC);
- // When we actually visit the basic block PredMBB, we will materialize
- // the virtual register assignment in copySwiftErrorsToFinalVRegs.
- FuncInfo->SwiftErrorWorklist[PredMBB].push_back(VReg);
+ // We only need to do this when we have a swifterror parameter or swifterror
+ // alloc.
+ if (FuncInfo->SwiftErrorVals.empty())
+ return;
+
+ // For each machine basic block in reverse post order.
+ ReversePostOrderTraversal<MachineFunction *> RPOT(FuncInfo->MF);
+ for (ReversePostOrderTraversal<MachineFunction *>::rpo_iterator
+ It = RPOT.begin(),
+ E = RPOT.end();
+ It != E; ++It) {
+ MachineBasicBlock *MBB = *It;
+
+ // For each swifterror value in the function.
+ for(const auto *SwiftErrorVal : FuncInfo->SwiftErrorVals) {
+ auto Key = std::make_pair(MBB, SwiftErrorVal);
+ auto UUseIt = FuncInfo->SwiftErrorVRegUpwardsUse.find(Key);
+ auto VRegDefIt = FuncInfo->SwiftErrorVRegDefMap.find(Key);
+ bool UpwardsUse = UUseIt != FuncInfo->SwiftErrorVRegUpwardsUse.end();
+ unsigned UUseVReg = UpwardsUse ? UUseIt->second : 0;
+ bool DownwardDef = VRegDefIt != FuncInfo->SwiftErrorVRegDefMap.end();
+ assert(!(UpwardsUse && !DownwardDef) &&
+ "We can't have an upwards use but no downwards def");
+
+ // If there is no upwards exposed use and there is an entry in the def map
+ // for this swifterror value, we don't need to do anything: we already
+ // have a downward def for this basic block.
+ if (!UpwardsUse && DownwardDef)
+ continue;
+
+ // Otherwise we either have an upwards exposed use vreg that we need to
+ // materialize or need to forward the downward def from predecessors.
+
+ // Check whether we have a single vreg def from all predecessors.
+ // Otherwise we need a phi.
+ SmallVector<std::pair<MachineBasicBlock *, unsigned>, 4> VRegs;
+ SmallSet<const MachineBasicBlock*, 8> Visited;
+ for (auto *Pred : MBB->predecessors()) {
+ if (!Visited.insert(Pred).second)
+ continue;
+ VRegs.push_back(std::make_pair(
+ Pred, FuncInfo->getOrCreateSwiftErrorVReg(Pred, SwiftErrorVal)));
+ if (Pred != MBB)
+ continue;
+ // We have a self-edge.
+ // If there was no upwards use in this basic block, there is now one: the
+ // phi needs to use itself.
+ if (!UpwardsUse) {
+ UpwardsUse = true;
+ UUseIt = FuncInfo->SwiftErrorVRegUpwardsUse.find(Key);
+ assert(UUseIt != FuncInfo->SwiftErrorVRegUpwardsUse.end());
+ UUseVReg = UUseIt->second;
+ }
}
- }
- }
- // For the case of multiple predecessors, create a virtual register for
- // each swifterror value and generate Phi node.
- for (unsigned I = 0, E = FuncInfo->SwiftErrorVals.size(); I < E; I++) {
- unsigned VReg = FuncInfo->MF->getRegInfo().createVirtualRegister(RC);
- FuncInfo->SwiftErrorMap[FuncInfo->MBB].push_back(VReg);
-
- MachineInstrBuilder SwiftErrorPHI = BuildMI(*FuncInfo->MBB,
- FuncInfo->InsertPt, SDB->getCurDebugLoc(),
- TII->get(TargetOpcode::PHI), VReg);
- for (const_pred_iterator PI = pred_begin(LLVMBB), PE = pred_end(LLVMBB);
- PI != PE; ++PI) {
- auto *PredMBB = FuncInfo->MBBMap[*PI];
- unsigned SwiftErrorReg = FuncInfo->SwiftErrorMap.count(PredMBB) ?
- FuncInfo->SwiftErrorMap[PredMBB][I] :
- FuncInfo->SwiftErrorWorklist[PredMBB][I];
- SwiftErrorPHI.addReg(SwiftErrorReg)
- .addMBB(PredMBB);
+ // We need a phi node if we have more than one predecessor with different
+ // downward defs.
+ bool needPHI =
+ VRegs.size() >= 1 &&
+ std::find_if(
+ VRegs.begin(), VRegs.end(),
+ [&](const std::pair<const MachineBasicBlock *, unsigned> &V)
+ -> bool { return V.second != VRegs[0].second; }) !=
+ VRegs.end();
+
+ // If there is no upwards exposed use and we don't need a phi, just
+ // forward the swifterror vreg from the predecessor(s).
+ if (!UpwardsUse && !needPHI) {
+ assert(!VRegs.empty() &&
+ "No predecessors? The entry block should bail out earlier");
+ // Just forward the swifterror vreg from the predecessor(s).
+ FuncInfo->setCurrentSwiftErrorVReg(MBB, SwiftErrorVal, VRegs[0].second);
+ continue;
+ }
+
+ auto DLoc = isa<Instruction>(SwiftErrorVal)
+ ? dyn_cast<Instruction>(SwiftErrorVal)->getDebugLoc()
+ : DebugLoc();
+ const auto *TII = FuncInfo->MF->getSubtarget().getInstrInfo();
+
+ // If we don't need a phi create a copy to the upward exposed vreg.
+ if (!needPHI) {
+ assert(UpwardsUse);
+ unsigned DestReg = UUseVReg;
+ BuildMI(*MBB, MBB->getFirstNonPHI(), DLoc, TII->get(TargetOpcode::COPY),
+ DestReg)
+ .addReg(VRegs[0].second);
+ continue;
+ }
+
+ // We need a phi: if there is an upwards exposed use we already have a
+ // destination virtual register number otherwise we generate a new one.
+ auto &DL = FuncInfo->MF->getDataLayout();
+ auto const *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
+ unsigned PHIVReg =
+ UpwardsUse ? UUseVReg
+ : FuncInfo->MF->getRegInfo().createVirtualRegister(RC);
+ MachineInstrBuilder SwiftErrorPHI =
+ BuildMI(*MBB, MBB->getFirstNonPHI(), DLoc,
+ TII->get(TargetOpcode::PHI), PHIVReg);
+ for (auto BBRegPair : VRegs) {
+ SwiftErrorPHI.addReg(BBRegPair.second).addMBB(BBRegPair.first);
+ }
+
+ // We did not have a definition in this block before: store the phi's vreg
+ // as this block's downward exposed def.
+ if (!UpwardsUse)
+ FuncInfo->setCurrentSwiftErrorVReg(MBB, SwiftErrorVal, PHIVReg);
}
}
}
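
As a rough illustration of what propagateSwiftErrorVRegs computes, here is a self-contained toy model (plain C++ over a hard-coded diamond CFG, not LLVM code; the block names and the assumed per-block defs are invented for the example). It walks the blocks in reverse post order and either forwards the unique predecessor def or invents a new vreg where a PHI would be needed; the upwards-exposed-use bookkeeping is omitted for brevity:

    #include <cstdio>
    #include <map>
    #include <string>
    #include <vector>

    int main() {
      // Diamond CFG, listed in reverse post order: entry -> {then, else} -> merge.
      std::map<std::string, std::vector<std::string>> Preds = {
          {"entry", {}},
          {"then", {"entry"}},
          {"else", {"entry"}},
          {"merge", {"then", "else"}}};
      std::vector<std::string> RPO = {"entry", "then", "else", "merge"};

      std::map<std::string, unsigned> Def; // downward exposed def per block
      unsigned NextVReg = 1;

      // The entry block always defines the swifterror vreg (IMPLICIT_DEF or
      // the argument copy in the real code), and "then" is assumed to contain
      // a store to the swifterror value, which gives it its own def.
      Def["entry"] = NextVReg++; // vreg 1
      Def["then"] = NextVReg++;  // vreg 2

      for (const auto &BB : RPO) {
        if (Def.count(BB))
          continue; // this block already has a downward def
        const auto &P = Preds.at(BB);
        unsigned First = Def.at(P.front());
        bool NeedPHI = false;
        for (const auto &Pred : P)
          NeedPHI |= Def.at(Pred) != First;
        if (!NeedPHI) {
          Def[BB] = First; // forward the unique predecessor def
          std::printf("%s: forwards vreg %u\n", BB.c_str(), First);
        } else {
          Def[BB] = NextVReg++; // destination of the PHI we would build
          std::printf("%s: needs a PHI, result in vreg %u\n", BB.c_str(), Def[BB]);
        }
      }
      return 0;
    }
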
@@ -1313,7 +1377,7 @@ void SelectionDAGISel::SelectAllBasicBlocks(const Function &Fn) {
if (!FuncInfo->MBB)
continue; // Some blocks like catchpads have no code or MBB.
FuncInfo->InsertPt = FuncInfo->MBB->getFirstNonPHI();
- mergeIncomingSwiftErrors(FuncInfo, TLI, TII, LLVMBB, SDB);
+ createSwiftErrorEntriesInEntryBlock(FuncInfo, TLI, TII, LLVMBB, SDB);
// Setup an EH landing-pad block.
FuncInfo->ExceptionPointerVirtReg = 0;
@@ -1490,6 +1554,8 @@ void SelectionDAGISel::SelectAllBasicBlocks(const Function &Fn) {
FuncInfo->PHINodesToUpdate.clear();
}
+ propagateSwiftErrorVRegs(FuncInfo);
+
delete FastIS;
SDB->clearDanglingDebugInfo();
SDB->SPDescriptor.resetPerFunctionState();