Diffstat (limited to 'llvm/lib')
-rw-r--r--  llvm/lib/Analysis/CGSCCPassManager.cpp  | 51
-rw-r--r--  llvm/lib/Analysis/LazyCallGraph.cpp     | 20
2 files changed, 50 insertions, 21 deletions
diff --git a/llvm/lib/Analysis/CGSCCPassManager.cpp b/llvm/lib/Analysis/CGSCCPassManager.cpp
index 8205bba0f57..9db1e95ece7 100644
--- a/llvm/lib/Analysis/CGSCCPassManager.cpp
+++ b/llvm/lib/Analysis/CGSCCPassManager.cpp
@@ -570,25 +570,48 @@ LazyCallGraph::SCC &llvm::updateCGAndAnalysisManagerForFunctionPass(
// Otherwise we are switching an internal ref edge to a call edge. This
// may merge away some SCCs, and we add those to the UpdateResult. We also
// need to make sure to update the worklist in the event SCCs have moved
- // before the current one in the post-order sequence.
+ // before the current one in the post-order sequence
+ bool HasFunctionAnalysisProxy = false;
auto InitialSCCIndex = RC->find(*C) - RC->begin();
- auto InvalidatedSCCs = RC->switchInternalEdgeToCall(N, *CallTarget);
- if (!InvalidatedSCCs.empty()) {
+ bool FormedCycle = RC->switchInternalEdgeToCall(
+ N, *CallTarget, [&](ArrayRef<SCC *> MergedSCCs) {
+ for (SCC *MergedC : MergedSCCs) {
+ assert(MergedC != &TargetC && "Cannot merge away the target SCC!");
+
+ HasFunctionAnalysisProxy |=
+ AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(
+ *MergedC) != nullptr;
+
+ // Mark that this SCC will no longer be valid.
+ UR.InvalidatedSCCs.insert(MergedC);
+
+ // FIXME: We should really do a 'clear' here to forcibly release
+ // memory, but we don't have a good way of doing that and
+ // preserving the function analyses.
+ auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
+ PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
+ AM.invalidate(*MergedC, PA);
+ }
+ });
+
+ // If we formed a cycle by creating this call, we need to update more data
+ // structures.
+ if (FormedCycle) {
C = &TargetC;
assert(G.lookupSCC(N) == C && "Failed to update current SCC!");
- // Any analyses cached for this SCC are no longer precise as the shape
- // has changed by introducing this cycle.
- AM.invalidate(*C, PreservedAnalyses::none());
-
- for (SCC *InvalidatedC : InvalidatedSCCs) {
- assert(InvalidatedC != C && "Cannot invalidate the current SCC!");
- UR.InvalidatedSCCs.insert(InvalidatedC);
+ // If one of the invalidated SCCs had a cached proxy to a function
+ // analysis manager, we need to create a proxy in the new current SCC as
+ // the invalidated SCCs had their functions moved.
+ if (HasFunctionAnalysisProxy)
+ AM.getResult<FunctionAnalysisManagerCGSCCProxy>(*C, G);
- // Also clear any cached analyses for the SCCs that are dead. This
- // isn't really necessary for correctness but can release memory.
- AM.clear(*InvalidatedC);
- }
+ // Any analyses cached for this SCC are no longer precise as the shape
+ // has changed by introducing this cycle. However, we have taken care to
+ // update the proxies so it remains valid.
+ auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
+ PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
+ AM.invalidate(*C, PA);
}
auto NewSCCIndex = RC->find(*C) - RC->begin();
if (InitialSCCIndex < NewSCCIndex) {
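
For reference, the invalidation pattern used repeatedly in the hunk above is isolated below as a minimal sketch; the helper name is hypothetical and not part of the patch. The idea is to invalidate everything cached for an SCC except the function analyses: all analyses on Function are marked preserved, and the FunctionAnalysisManagerCGSCCProxy is preserved as well so the nested function analysis manager (and its cached results) survives the SCC-level invalidation.

#include "llvm/Analysis/CGSCCPassManager.h"
#include "llvm/Analysis/LazyCallGraph.h"
#include "llvm/IR/PassManager.h"
using namespace llvm;

// Hypothetical helper mirroring the pattern in the patch: clear the SCC-level
// analyses for C while keeping the function analyses alive.
static void invalidateSCCKeepingFunctionAnalyses(CGSCCAnalysisManager &AM,
                                                 LazyCallGraph::SCC &C) {
  // Treat every analysis on Function as preserved ...
  auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
  // ... and also preserve the proxy that owns the FunctionAnalysisManager so
  // it is not torn down along with the SCC-level results.
  PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
  // Everything else cached for this SCC is invalidated.
  AM.invalidate(C, PA);
}
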
diff --git a/llvm/lib/Analysis/LazyCallGraph.cpp b/llvm/lib/Analysis/LazyCallGraph.cpp
index b6a9436cc1e..a4c3e43b4b0 100644
--- a/llvm/lib/Analysis/LazyCallGraph.cpp
+++ b/llvm/lib/Analysis/LazyCallGraph.cpp
@@ -456,8 +456,10 @@ updatePostorderSequenceForEdgeInsertion(
return make_range(SCCs.begin() + SourceIdx, SCCs.begin() + TargetIdx);
}
-SmallVector<LazyCallGraph::SCC *, 1>
-LazyCallGraph::RefSCC::switchInternalEdgeToCall(Node &SourceN, Node &TargetN) {
+bool
+LazyCallGraph::RefSCC::switchInternalEdgeToCall(
+ Node &SourceN, Node &TargetN,
+ function_ref<void(ArrayRef<SCC *> MergeSCCs)> MergeCB) {
assert(!(*SourceN)[TargetN].isCall() && "Must start with a ref edge!");
SmallVector<SCC *, 1> DeletedSCCs;
@@ -475,7 +477,7 @@ LazyCallGraph::RefSCC::switchInternalEdgeToCall(Node &SourceN, Node &TargetN) {
// we've just added more connectivity.
if (&SourceSCC == &TargetSCC) {
SourceN->setEdgeKind(TargetN, Edge::Call);
- return DeletedSCCs;
+ return false; // No new cycle.
}
// At this point we leverage the postorder list of SCCs to detect when the
@@ -488,7 +490,7 @@ LazyCallGraph::RefSCC::switchInternalEdgeToCall(Node &SourceN, Node &TargetN) {
int TargetIdx = SCCIndices[&TargetSCC];
if (TargetIdx < SourceIdx) {
SourceN->setEdgeKind(TargetN, Edge::Call);
- return DeletedSCCs;
+ return false; // No new cycle.
}
// Compute the SCCs which (transitively) reach the source.
@@ -555,12 +557,16 @@ LazyCallGraph::RefSCC::switchInternalEdgeToCall(Node &SourceN, Node &TargetN) {
SourceSCC, TargetSCC, SCCs, SCCIndices, ComputeSourceConnectedSet,
ComputeTargetConnectedSet);
+ // Run the user's callback on the merged SCCs before we actually merge them.
+ if (MergeCB)
+ MergeCB(makeArrayRef(MergeRange.begin(), MergeRange.end()));
+
// If the merge range is empty, then adding the edge didn't actually form any
// new cycles. We're done.
if (MergeRange.begin() == MergeRange.end()) {
// Now that the SCC structure is finalized, flip the kind to call.
SourceN->setEdgeKind(TargetN, Edge::Call);
- return DeletedSCCs;
+ return false; // No new cycle.
}
#ifndef NDEBUG
@@ -596,8 +602,8 @@ LazyCallGraph::RefSCC::switchInternalEdgeToCall(Node &SourceN, Node &TargetN) {
// Now that the SCC structure is finalized, flip the kind to call.
SourceN->setEdgeKind(TargetN, Edge::Call);
- // And we're done!
- return DeletedSCCs;
+ // And we're done, but we did form a new cycle.
+ return true;
}
void LazyCallGraph::RefSCC::switchTrivialInternalEdgeToRef(Node &SourceN,
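
For reference, a sketch of how a caller might use the new interface; everything here other than switchInternalEdgeToCall and the LLVM types is an illustrative assumption, not part of the patch. The optional callback observes the SCCs that are about to be folded into the target's SCC while they are still live, distinct objects, and the bool return reports whether flipping the edge actually formed a new call cycle.

#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Analysis/LazyCallGraph.h"
using namespace llvm;

// Hypothetical caller: flip the SourceN -> TargetN ref edge to a call edge and
// collect the SCCs that get merged away in the process.
static bool flipRefEdgeToCall(LazyCallGraph::RefSCC &RC,
                              LazyCallGraph::Node &SourceN,
                              LazyCallGraph::Node &TargetN,
                              SmallVectorImpl<LazyCallGraph::SCC *> &Merged) {
  bool FormedCycle = RC.switchInternalEdgeToCall(
      SourceN, TargetN, [&](ArrayRef<LazyCallGraph::SCC *> MergedSCCs) {
        // The callback runs before the merge, so each SCC here is still a
        // separate, valid object; after the call returns they are dead.
        Merged.append(MergedSCCs.begin(), MergedSCCs.end());
      });
  // A true return means SCCs were merged into the target's SCC (a new call
  // cycle was formed); on a false return, Merged is left untouched.
  return FormedCycle;
}
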