CGSCCPassManager.cpp
//===- CGSCCPassManager.cpp - Managing & running CGSCC passes ------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/CGSCCPassManager.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/Analysis/LazyCallGraph.h"
#include "llvm/IR/CallSite.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/PassManager.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
#include <iterator>

#define DEBUG_TYPE "cgscc"

using namespace llvm;

// Explicit template instantiations and specialization definitions for core
// template typedefs.
namespace llvm {

// Explicit instantiations for the core proxy templates.
template class AllAnalysesOn<LazyCallGraph::SCC>;
template class AnalysisManager<LazyCallGraph::SCC, LazyCallGraph &>;
template class PassManager<LazyCallGraph::SCC, CGSCCAnalysisManager,
                           LazyCallGraph &, CGSCCUpdateResult &>;
template class InnerAnalysisManagerProxy<CGSCCAnalysisManager, Module>;
template class OuterAnalysisManagerProxy<ModuleAnalysisManager,
                                         LazyCallGraph::SCC, LazyCallGraph &>;

/// Explicitly specialize the pass manager run method to handle call graph
/// updates.
template <>
PreservedAnalyses
PassManager<LazyCallGraph::SCC, CGSCCAnalysisManager, LazyCallGraph &,
            CGSCCUpdateResult &>::run(LazyCallGraph::SCC &InitialC,
                                      CGSCCAnalysisManager &AM,
                                      LazyCallGraph &G, CGSCCUpdateResult &UR) {
  // Request PassInstrumentation from the analysis manager; we will use it to
  // run instrumenting callbacks for the passes later.
  PassInstrumentation PI =
      AM.getResult<PassInstrumentationAnalysis>(InitialC, G);

  PreservedAnalyses PA = PreservedAnalyses::all();

  if (DebugLogging)
    dbgs() << "Starting CGSCC pass manager run.\n";

  // The SCC may be refined while we are running passes over it, so set up
  // a pointer that we can update.
  LazyCallGraph::SCC *C = &InitialC;

  for (auto &Pass : Passes) {
    if (DebugLogging)
      dbgs() << "Running pass: " << Pass->name() << " on " << *C << "\n";

    // Check the PassInstrumentation's BeforePass callbacks before running the
    // pass; skip its execution completely if asked to (the callback returns
    // false).
    if (!PI.runBeforePass(*Pass, *C))
      continue;

    PreservedAnalyses PassPA = Pass->run(*C, AM, G, UR);

    if (UR.InvalidatedSCCs.count(C))
      PI.runAfterPassInvalidated<LazyCallGraph::SCC>(*Pass);
    else
      PI.runAfterPass<LazyCallGraph::SCC>(*Pass, *C);

    // Update the SCC if necessary.
    C = UR.UpdatedC ? UR.UpdatedC : C;

    // If the CGSCC pass wasn't able to provide a valid updated SCC, the
    // current SCC may simply need to be skipped if invalid.
    if (UR.InvalidatedSCCs.count(C)) {
      LLVM_DEBUG(dbgs() << "Skipping invalidated root or island SCC!\n");
      break;
    }
    // Check that we didn't miss any update scenario.
    assert(C->begin() != C->end() && "Cannot have an empty SCC!");

    // Update the analysis manager as each pass runs and potentially
    // invalidates analyses.
    AM.invalidate(*C, PassPA);

    // Finally, intersect the preserved analyses to compute the aggregate
    // preserved set for this pass manager.
    PA.intersect(std::move(PassPA));

    // FIXME: Historically, the pass managers all called the LLVM context's
    // yield function here. We don't have a generic way to acquire the
    // context, and it isn't yet clear what the right pattern is for yielding
    // in the new pass manager, so it is currently omitted.
    // ...getContext().yield();
  }

  // Invalidation was handled after each pass in the above loop for the current
  // SCC. Therefore, the remaining analysis results in the AnalysisManager are
  // preserved. We mark this with a set so that we don't need to inspect each
  // one individually.
  PA.preserveSet<AllAnalysesOn<LazyCallGraph::SCC>>();

  if (DebugLogging)
    dbgs() << "Finished CGSCC pass manager run.\n";

  return PA;
}
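
// (Editor's note: the following sketch is not part of the original file.) A
// CGSCC pipeline built on this run() specialization is normally driven from a
// module pipeline through the post-order CGSCC adaptor; the specific passes
// named below are just examples:
//
//   CGSCCPassManager CGPM(/*DebugLogging*/ true);
//   CGPM.addPass(PostOrderFunctionAttrsPass());
//
//   ModulePassManager MPM(/*DebugLogging*/ true);
//   MPM.addPass(createModuleToPostOrderCGSCCPassAdaptor(std::move(CGPM)));
//
//   PassBuilder PB;
//   LoopAnalysisManager LAM;
//   FunctionAnalysisManager FAM;
//   CGSCCAnalysisManager CGAM;
//   ModuleAnalysisManager MAM;
//   PB.registerModuleAnalyses(MAM);
//   PB.registerCGSCCAnalyses(CGAM);
//   PB.registerFunctionAnalyses(FAM);
//   PB.registerLoopAnalyses(LAM);
//   PB.crossRegisterProxies(LAM, FAM, CGAM, MAM);
//   MPM.run(M, MAM);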

bool CGSCCAnalysisManagerModuleProxy::Result::invalidate(
    Module &M, const PreservedAnalyses &PA,
    ModuleAnalysisManager::Invalidator &Inv) {
  // If literally everything is preserved, we're done.
  if (PA.areAllPreserved())
    return false; // This is still a valid proxy.

  // If this proxy or the call graph is going to be invalidated, we also need
  // to clear all the keys coming from that analysis.
  //
  // We also directly invalidate the FAM's module proxy if necessary, and if
  // that proxy isn't preserved we can't preserve this proxy either. We rely on
  // it to handle module -> function analysis invalidation in the face of
  // structural changes, so if it's unavailable we conservatively clear the
  // entire SCC layer as well rather than trying to do invalidation ourselves.
  auto PAC = PA.getChecker<CGSCCAnalysisManagerModuleProxy>();
  if (!(PAC.preserved() || PAC.preservedSet<AllAnalysesOn<Module>>()) ||
      Inv.invalidate<LazyCallGraphAnalysis>(M, PA) ||
      Inv.invalidate<FunctionAnalysisManagerModuleProxy>(M, PA)) {
    InnerAM->clear();

    // The proxy itself should also be marked as invalid so that we can observe
    // the new call graph. This isn't strictly necessary because we cheat
    // above, but it is still useful.
    return true;
  }

  // Directly check if the relevant set is preserved so we can short-circuit
  // invalidating SCCs below.
  bool AreSCCAnalysesPreserved =
      PA.allAnalysesInSetPreserved<AllAnalysesOn<LazyCallGraph::SCC>>();

  // OK, we have a graph, so we can propagate the invalidation down into it.
  G->buildRefSCCs();
  for (auto &RC : G->postorder_ref_sccs())
    for (auto &C : RC) {
      Optional<PreservedAnalyses> InnerPA;

      // Check to see whether the preserved set needs to be adjusted based on
      // module-level analysis invalidation triggering deferred invalidation
      // for this SCC.
      if (auto *OuterProxy =
              InnerAM->getCachedResult<ModuleAnalysisManagerCGSCCProxy>(C))
        for (const auto &OuterInvalidationPair :
             OuterProxy->getOuterInvalidations()) {
          AnalysisKey *OuterAnalysisID = OuterInvalidationPair.first;
          const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
          if (Inv.invalidate(OuterAnalysisID, M, PA)) {
            if (!InnerPA)
              InnerPA = PA;
            for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
              InnerPA->abandon(InnerAnalysisID);
          }
        }

      // Check if we needed a custom PA set. If so, we'll need to run the
      // inner invalidation.
      if (InnerPA) {
        InnerAM->invalidate(C, *InnerPA);
        continue;
      }

      // Otherwise we only need to do invalidation if the original PA set
      // didn't preserve all SCC analyses.
      if (!AreSCCAnalysesPreserved)
        InnerAM->invalidate(C, PA);
    }

  // Return false to indicate that this result is still a valid proxy.
  return false;
}
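
// (Editor's note: the following sketch is not part of the original file.) A
// module pass that makes no structural changes to the call graph can keep this
// whole layer cached by preserving exactly the analyses checked above;
// MyModulePass is hypothetical:
//
//   PreservedAnalyses MyModulePass::run(Module &M, ModuleAnalysisManager &AM) {
//     // ... changes that leave the call graph intact ...
//     PreservedAnalyses PA;
//     PA.preserve<LazyCallGraphAnalysis>();
//     PA.preserve<CGSCCAnalysisManagerModuleProxy>();
//     PA.preserve<FunctionAnalysisManagerModuleProxy>();
//     return PA;
//   }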

template <>
CGSCCAnalysisManagerModuleProxy::Result
CGSCCAnalysisManagerModuleProxy::run(Module &M, ModuleAnalysisManager &AM) {
  // Force the Function analysis manager to also be available so that it can
  // be accessed in an SCC analysis and proxied onward to function passes.
  // FIXME: It is pretty awkward to just drop the result here and assert that
  // we can find it again later.
  (void)AM.getResult<FunctionAnalysisManagerModuleProxy>(M);

  return Result(*InnerAM, AM.getResult<LazyCallGraphAnalysis>(M));
}

AnalysisKey FunctionAnalysisManagerCGSCCProxy::Key;

FunctionAnalysisManagerCGSCCProxy::Result
FunctionAnalysisManagerCGSCCProxy::run(LazyCallGraph::SCC &C,
                                       CGSCCAnalysisManager &AM,
                                       LazyCallGraph &CG) {
  // Collect the FunctionAnalysisManager from the Module layer and use that to
  // build the proxy result.
  //
  // This allows us to rely on the FunctionAnalysisManagerModuleProxy to
  // invalidate the function analyses.
  auto &MAM = AM.getResult<ModuleAnalysisManagerCGSCCProxy>(C, CG).getManager();
  Module &M = *C.begin()->getFunction().getParent();
  auto *FAMProxy = MAM.getCachedResult<FunctionAnalysisManagerModuleProxy>(M);
  assert(FAMProxy && "The CGSCC pass manager requires that the FAM module "
                     "proxy is run on the module prior to entering the CGSCC "
                     "walk.");

  // Note that we special-case invalidation handling of this proxy in the CGSCC
  // analysis manager's Module proxy. This avoids the need to do anything
  // special here to recompute all of this if the FAM's module proxy ever goes
  // away.
  return Result(FAMProxy->getManager());
}
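
// (Editor's note: the following sketch is not part of the original file.) An
// SCC pass reaches function-level analyses through this proxy; MyCGSCCPass is
// hypothetical, and TargetLibraryAnalysis is just an example query:
//
//   PreservedAnalyses MyCGSCCPass::run(LazyCallGraph::SCC &C,
//                                      CGSCCAnalysisManager &AM,
//                                      LazyCallGraph &CG,
//                                      CGSCCUpdateResult &UR) {
//     FunctionAnalysisManager &FAM =
//         AM.getResult<FunctionAnalysisManagerCGSCCProxy>(C, CG).getManager();
//     for (LazyCallGraph::Node &N : C)
//       (void)FAM.getResult<TargetLibraryAnalysis>(N.getFunction());
//     return PreservedAnalyses::all();
//   }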

bool FunctionAnalysisManagerCGSCCProxy::Result::invalidate(
    LazyCallGraph::SCC &C, const PreservedAnalyses &PA,
    CGSCCAnalysisManager::Invalidator &Inv) {
  // If literally everything is preserved, we're done.
  if (PA.areAllPreserved())
    return false; // This is still a valid proxy.

  // If this proxy isn't marked as preserved, then even if the result remains
  // valid, the key itself may no longer be valid, so we clear everything.
  //
  // Note that in order to preserve this proxy, a module pass must ensure that
  // the FAM has been completely updated to handle the deletion of functions.
  // Specifically, any FAM-cached results for those functions need to have been
  // forcibly cleared. When preserved, this proxy will only invalidate results
  // cached on functions *still in the module* at the end of the module pass.
  auto PAC = PA.getChecker<FunctionAnalysisManagerCGSCCProxy>();
  if (!PAC.preserved() &&
      !PAC.preservedSet<AllAnalysesOn<LazyCallGraph::SCC>>()) {
    for (LazyCallGraph::Node &N : C)
      FAM->clear(N.getFunction(), N.getFunction().getName());

    return true;
  }

  // Directly check if the relevant set is preserved.
  bool AreFunctionAnalysesPreserved =
      PA.allAnalysesInSetPreserved<AllAnalysesOn<Function>>();

  // Now walk all the functions to see if any inner analysis invalidation is
  // necessary.
  for (LazyCallGraph::Node &N : C) {
    Function &F = N.getFunction();
    Optional<PreservedAnalyses> FunctionPA;

    // Check to see whether the preserved set needs to be pruned based on
    // SCC-level analysis invalidation that triggers deferred invalidation
    // registered with the outer analysis manager proxy for this function.
    if (auto *OuterProxy =
            FAM->getCachedResult<CGSCCAnalysisManagerFunctionProxy>(F))
      for (const auto &OuterInvalidationPair :
           OuterProxy->getOuterInvalidations()) {
        AnalysisKey *OuterAnalysisID = OuterInvalidationPair.first;
        const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
        if (Inv.invalidate(OuterAnalysisID, C, PA)) {
          if (!FunctionPA)
            FunctionPA = PA;
          for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
            FunctionPA->abandon(InnerAnalysisID);
        }
      }

    // Check if we needed a custom PA set, and if so we'll need to run the
    // inner invalidation.
    if (FunctionPA) {
      FAM->invalidate(F, *FunctionPA);
      continue;
    }

    // Otherwise we only need to do invalidation if the original PA set didn't
    // preserve all function analyses.
    if (!AreFunctionAnalysesPreserved)
      FAM->invalidate(F, PA);
  }

  // Return false to indicate that this result is still a valid proxy.
  return false;
}
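
// (Editor's note: the following sketch is not part of the original file.) The
// deferred "outer invalidation" handled above is what a function analysis opts
// into when its cached result holds a handle to an SCC-level result. Assuming
// hypothetical MySCCAnalysis and MyFunctionAnalysis types, the registration
// from inside MyFunctionAnalysis::run would look roughly like:
//
//   auto &OuterProxy = FAM.getResult<CGSCCAnalysisManagerFunctionProxy>(F);
//   OuterProxy.registerOuterAnalysisInvalidation<MySCCAnalysis,
//                                                MyFunctionAnalysis>();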

} // end namespace llvm

/// When a new SCC is created for the graph and there might be function
/// analysis results cached for the functions now in that SCC, two forms of
/// updates are required.
///
/// First, a proxy from the SCC to the FunctionAnalysisManager needs to be
/// created so that any subsequent invalidation events to the SCC are
/// propagated to the function analysis results cached for functions within it.
///
/// Second, if any of the functions within the SCC have analysis results with
/// outer analysis dependencies, then those dependencies would point to the
/// *wrong* SCC's analysis result. We forcibly invalidate the necessary
/// function analyses so that they don't retain stale handles.
static void updateNewSCCFunctionAnalyses(LazyCallGraph::SCC &C,
                                         LazyCallGraph &G,
                                         CGSCCAnalysisManager &AM) {
  // Get the relevant function analysis manager.
  auto &FAM =
      AM.getResult<FunctionAnalysisManagerCGSCCProxy>(C, G).getManager();

  // Now walk the functions in this SCC and invalidate any function analysis
  // results that might have outer dependencies on an SCC analysis.
  for (LazyCallGraph::Node &N : C) {
    Function &F = N.getFunction();

    auto *OuterProxy =
        FAM.getCachedResult<CGSCCAnalysisManagerFunctionProxy>(F);
    if (!OuterProxy)
      // No outer analyses were queried, nothing to do.
      continue;

    // Forcibly abandon all the inner analyses with dependencies, but
    // invalidate nothing else.
    auto PA = PreservedAnalyses::all();
    for (const auto &OuterInvalidationPair :
         OuterProxy->getOuterInvalidations()) {
      const auto &InnerAnalysisIDs = OuterInvalidationPair.second;
      for (AnalysisKey *InnerAnalysisID : InnerAnalysisIDs)
        PA.abandon(InnerAnalysisID);
    }

    // Now invalidate anything we found.
    FAM.invalidate(F, PA);
  }
}

/// Helper function to update both the \c CGSCCAnalysisManager \p AM and the \c
/// CGSCCPassManager's \c CGSCCUpdateResult \p UR based on a range of newly
/// added SCCs.
///
/// The range of new SCCs must be in postorder already. The SCC they were split
/// out of must be provided as \p C. The current node being mutated and
/// triggering updates must be passed as \p N.
///
/// This function returns the SCC containing \p N. This will be either \p C if
/// no new SCCs have been split out, or it will be the new SCC containing \p N.
template <typename SCCRangeT>
static LazyCallGraph::SCC *
incorporateNewSCCRange(const SCCRangeT &NewSCCRange, LazyCallGraph &G,
                       LazyCallGraph::Node &N, LazyCallGraph::SCC *C,
                       CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR) {
  using SCC = LazyCallGraph::SCC;

  if (NewSCCRange.begin() == NewSCCRange.end())
    return C;

  // Add the current SCC to the worklist as its shape has changed.
  UR.CWorklist.insert(C);
  LLVM_DEBUG(dbgs() << "Enqueuing the existing SCC in the worklist:" << *C
                    << "\n");

  SCC *OldC = C;

  // Update the current SCC. Note that if we have new SCCs, this must actually
  // change the SCC.
  assert(C != &*NewSCCRange.begin() &&
         "Cannot insert new SCCs without changing current SCC!");
  C = &*NewSCCRange.begin();
  assert(G.lookupSCC(N) == C && "Failed to update current SCC!");

  // If we had a cached FAM proxy originally, we will want to create more of
  // them for each SCC that was split off.
  bool NeedFAMProxy =
      AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(*OldC) != nullptr;

  // We need to propagate an invalidation call to all but the newly current SCC
  // because the outer pass manager won't do that for us after splitting them.
  // FIXME: We should accept a PreservedAnalyses from the CG updater so that if
  // there are preserved analyses we can avoid invalidating them here for
  // split-off SCCs.
  // We know, however, that this will preserve any FAM proxy, so go ahead and
  // mark that.
  auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
  PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
  AM.invalidate(*OldC, PA);

  // Ensure the now-current SCC's function analyses are updated.
  if (NeedFAMProxy)
    updateNewSCCFunctionAnalyses(*C, G, AM);

  for (SCC &NewC : llvm::reverse(make_range(std::next(NewSCCRange.begin()),
                                            NewSCCRange.end()))) {
    assert(C != &NewC && "No need to re-visit the current SCC!");
    assert(OldC != &NewC && "Already handled the original SCC!");
    UR.CWorklist.insert(&NewC);
    LLVM_DEBUG(dbgs() << "Enqueuing a newly formed SCC:" << NewC << "\n");

    // Ensure new SCCs' function analyses are updated.
    if (NeedFAMProxy)
      updateNewSCCFunctionAnalyses(NewC, G, AM);

    // Also propagate a normal invalidation to the new SCC as only the current
    // will get one from the pass manager infrastructure.
    AM.invalidate(NewC, PA);
  }
  return C;
}

LazyCallGraph::SCC &llvm::updateCGAndAnalysisManagerForFunctionPass(
    LazyCallGraph &G, LazyCallGraph::SCC &InitialC, LazyCallGraph::Node &N,
    CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR) {
  using Node = LazyCallGraph::Node;
  using Edge = LazyCallGraph::Edge;
  using SCC = LazyCallGraph::SCC;
  using RefSCC = LazyCallGraph::RefSCC;

  RefSCC &InitialRC = InitialC.getOuterRefSCC();
  SCC *C = &InitialC;
  RefSCC *RC = &InitialRC;
  Function &F = N.getFunction();

  // Walk the function body and build up the set of retained, promoted, and
  // demoted edges.
  SmallVector<Constant *, 16> Worklist;
  SmallPtrSet<Constant *, 16> Visited;
  SmallPtrSet<Node *, 16> RetainedEdges;
  SmallSetVector<Node *, 4> PromotedRefTargets;
  SmallSetVector<Node *, 4> DemotedCallTargets;

  // First walk the function and handle all called functions. We do this first
  // because if there is a single call edge, whether there are ref edges is
  // irrelevant.
  for (Instruction &I : instructions(F))
    if (auto CS = CallSite(&I))
      if (Function *Callee = CS.getCalledFunction())
        if (Visited.insert(Callee).second && !Callee->isDeclaration()) {
          Node &CalleeN = *G.lookup(*Callee);
          Edge *E = N->lookup(CalleeN);
          // FIXME: We should really handle adding new calls. While it will
          // make downstream usage more complex, there is no fundamental
          // limitation and it will allow passes within the CGSCC to be a bit
          // more flexible in what transforms they can do. Until then, we
          // verify that new calls haven't been introduced.
          assert(E && "No function transformations should introduce *new* "
                      "call edges! Any new calls should be modeled as "
                      "promoted existing ref edges!");
          bool Inserted = RetainedEdges.insert(&CalleeN).second;
          (void)Inserted;
          assert(Inserted && "We should never visit a function twice.");
          if (!E->isCall())
            PromotedRefTargets.insert(&CalleeN);
        }

  // Now walk all references.
  for (Instruction &I : instructions(F))
    for (Value *Op : I.operand_values())
      if (auto *C = dyn_cast<Constant>(Op))
        if (Visited.insert(C).second)
          Worklist.push_back(C);

  auto VisitRef = [&](Function &Referee) {
    Node &RefereeN = *G.lookup(Referee);
    Edge *E = N->lookup(RefereeN);
    // FIXME: Similarly to new calls, we also currently preclude
    // introducing new references. See above for details.
    assert(E && "No function transformations should introduce *new* ref "
                "edges! Any new ref edges would require IPO which "
                "function passes aren't allowed to do!");
    bool Inserted = RetainedEdges.insert(&RefereeN).second;
    (void)Inserted;
    assert(Inserted && "We should never visit a function twice.");
    if (E->isCall())
      DemotedCallTargets.insert(&RefereeN);
  };
  LazyCallGraph::visitReferences(Worklist, Visited, VisitRef);

  // Include synthetic reference edges to known, defined lib functions.
  for (auto *F : G.getLibFunctions())
    // While the list of lib functions doesn't have repeats, don't re-visit
    // anything handled above.
    if (!Visited.count(F))
      VisitRef(*F);

  // First remove all of the edges that are no longer present in this function.
  // The first step makes these edges uniformly ref edges and accumulates them
  // into a separate data structure so removal doesn't invalidate anything.
  SmallVector<Node *, 4> DeadTargets;
  for (Edge &E : *N) {
    if (RetainedEdges.count(&E.getNode()))
      continue;

    SCC &TargetC = *G.lookupSCC(E.getNode());
    RefSCC &TargetRC = TargetC.getOuterRefSCC();
    if (&TargetRC == RC && E.isCall()) {
      if (C != &TargetC) {
        // For separate SCCs this is trivial.
        RC->switchTrivialInternalEdgeToRef(N, E.getNode());
      } else {
        // Now update the call graph.
        C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, E.getNode()),
                                   G, N, C, AM, UR);
      }
    }

    // Now that this is ready for actual removal, put it into our list.
    DeadTargets.push_back(&E.getNode());
  }
  // Remove the easy cases quickly and actually pull them out of our list.
  DeadTargets.erase(
      llvm::remove_if(DeadTargets,
                      [&](Node *TargetN) {
                        SCC &TargetC = *G.lookupSCC(*TargetN);
                        RefSCC &TargetRC = TargetC.getOuterRefSCC();

                        // We can't trivially remove internal targets, so skip
                        // those.
                        if (&TargetRC == RC)
                          return false;

                        RC->removeOutgoingEdge(N, *TargetN);
                        LLVM_DEBUG(dbgs() << "Deleting outgoing edge from '"
                                          << N << "' to '" << TargetN
                                          << "'\n");
                        return true;
                      }),
      DeadTargets.end());

  // Now do a batch removal of the internal ref edges left.
  auto NewRefSCCs = RC->removeInternalRefEdge(N, DeadTargets);
  if (!NewRefSCCs.empty()) {
    // The old RefSCC is dead, mark it as such.
    UR.InvalidatedRefSCCs.insert(RC);

    // Note that we don't bother to invalidate analyses as ref-edge
    // connectivity is not really observable in any way and is intended
    // exclusively to be used for ordering of transforms rather than for
    // analysis conclusions.

    // Update RC to the "bottom".
    assert(G.lookupSCC(N) == C && "Changed the SCC when splitting RefSCCs!");
    RC = &C->getOuterRefSCC();
    assert(G.lookupRefSCC(N) == RC && "Failed to update current RefSCC!");

    // The RC worklist is in reverse postorder, so we enqueue the new ones in
    // RPO except for the one which contains the source node as that is the
    // "bottom" we will continue processing in the bottom-up walk.
    assert(NewRefSCCs.front() == RC &&
           "New current RefSCC not first in the returned list!");
    for (RefSCC *NewRC : llvm::reverse(make_range(std::next(NewRefSCCs.begin()),
                                                  NewRefSCCs.end()))) {
      assert(NewRC != RC && "Should not encounter the current RefSCC further "
                            "in the postorder list of new RefSCCs.");
      UR.RCWorklist.insert(NewRC);
      LLVM_DEBUG(dbgs() << "Enqueuing a new RefSCC in the update worklist: "
                        << *NewRC << "\n");
    }
  }

  // Next demote all the call edges that are now ref edges. This helps make
  // the SCCs small, which should minimize the work below as we don't want to
  // form cycles that this would break.
  for (Node *RefTarget : DemotedCallTargets) {
    SCC &TargetC = *G.lookupSCC(*RefTarget);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();

    // The easy case is when the target RefSCC is not this RefSCC. This is
    // only supported when the target RefSCC is a child of this RefSCC.
    if (&TargetRC != RC) {
      assert(RC->isAncestorOf(TargetRC) &&
             "Cannot potentially form RefSCC cycles here!");
      RC->switchOutgoingEdgeToRef(N, *RefTarget);
      LLVM_DEBUG(dbgs() << "Switch outgoing call edge to a ref edge from '" << N
                        << "' to '" << *RefTarget << "'\n");
      continue;
    }

    // We are switching an internal call edge to a ref edge. This may split up
    // some SCCs.
    if (C != &TargetC) {
      // For separate SCCs this is trivial.
      RC->switchTrivialInternalEdgeToRef(N, *RefTarget);
      continue;
    }

    // Now update the call graph.
    C = incorporateNewSCCRange(RC->switchInternalEdgeToRef(N, *RefTarget), G, N,
                               C, AM, UR);
  }

  // Now promote ref edges into call edges.
  for (Node *CallTarget : PromotedRefTargets) {
    SCC &TargetC = *G.lookupSCC(*CallTarget);
    RefSCC &TargetRC = TargetC.getOuterRefSCC();

    // The easy case is when the target RefSCC is not this RefSCC. This is
    // only supported when the target RefSCC is a child of this RefSCC.
    if (&TargetRC != RC) {
      assert(RC->isAncestorOf(TargetRC) &&
             "Cannot potentially form RefSCC cycles here!");
      RC->switchOutgoingEdgeToCall(N, *CallTarget);
      LLVM_DEBUG(dbgs() << "Switch outgoing ref edge to a call edge from '" << N
                        << "' to '" << *CallTarget << "'\n");
      continue;
    }
    LLVM_DEBUG(dbgs() << "Switch an internal ref edge to a call edge from '"
                      << N << "' to '" << *CallTarget << "'\n");

    // Otherwise we are switching an internal ref edge to a call edge. This
    // may merge away some SCCs, and we add those to the UpdateResult. We also
    // need to make sure to update the worklist in the event SCCs have moved
    // before the current one in the post-order sequence.
    bool HasFunctionAnalysisProxy = false;
    auto InitialSCCIndex = RC->find(*C) - RC->begin();
    bool FormedCycle = RC->switchInternalEdgeToCall(
        N, *CallTarget, [&](ArrayRef<SCC *> MergedSCCs) {
          for (SCC *MergedC : MergedSCCs) {
            assert(MergedC != &TargetC && "Cannot merge away the target SCC!");

            HasFunctionAnalysisProxy |=
                AM.getCachedResult<FunctionAnalysisManagerCGSCCProxy>(
                    *MergedC) != nullptr;

            // Mark that this SCC will no longer be valid.
            UR.InvalidatedSCCs.insert(MergedC);

            // FIXME: We should really do a 'clear' here to forcibly release
            // memory, but we don't have a good way of doing that and
            // preserving the function analyses.
            auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
            PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
            AM.invalidate(*MergedC, PA);
          }
        });

    // If we formed a cycle by creating this call, we need to update more data
    // structures.
    if (FormedCycle) {
      C = &TargetC;
      assert(G.lookupSCC(N) == C && "Failed to update current SCC!");

      // If one of the invalidated SCCs had a cached proxy to a function
      // analysis manager, we need to create a proxy in the new current SCC as
      // the invalidated SCCs had their functions moved.
      if (HasFunctionAnalysisProxy)
        AM.getResult<FunctionAnalysisManagerCGSCCProxy>(*C, G);

      // Any analyses cached for this SCC are no longer precise as the shape
      // has changed by introducing this cycle. However, we have taken care to
      // update the proxies so it remains valid.
      auto PA = PreservedAnalyses::allInSet<AllAnalysesOn<Function>>();
      PA.preserve<FunctionAnalysisManagerCGSCCProxy>();
      AM.invalidate(*C, PA);
    }
    auto NewSCCIndex = RC->find(*C) - RC->begin();
    // If we have actually moved an SCC to be topologically "below" the current
    // one due to merging, we will need to revisit the current SCC after
    // visiting those moved SCCs.
    //
    // It is critical that we *do not* revisit the current SCC unless we
    // actually move SCCs in the process of merging because otherwise we may
    // form a cycle where an SCC is split apart, merged, split, merged and so
    // on infinitely.
    if (InitialSCCIndex < NewSCCIndex) {
      // Put our current SCC back onto the worklist as we'll visit other SCCs
      // that are now definitively ordered prior to the current one in the
      // post-order sequence, and may end up observing more precise context to
      // optimize the current SCC.
      UR.CWorklist.insert(C);
      LLVM_DEBUG(dbgs() << "Enqueuing the existing SCC in the worklist: " << *C
                        << "\n");
      // Enqueue in reverse order as we pop off the back of the worklist.
      for (SCC &MovedC : llvm::reverse(make_range(RC->begin() + InitialSCCIndex,
                                                  RC->begin() + NewSCCIndex))) {
        UR.CWorklist.insert(&MovedC);
        LLVM_DEBUG(dbgs() << "Enqueuing a newly earlier in post-order SCC: "
                          << MovedC << "\n");
      }
    }
  }

  assert(!UR.InvalidatedSCCs.count(C) && "Invalidated the current SCC!");
  assert(!UR.InvalidatedRefSCCs.count(RC) && "Invalidated the current RefSCC!");
  assert(&C->getOuterRefSCC() == RC && "Current SCC not in current RefSCC!");

  // Record the current RefSCC and SCC for higher layers of the CGSCC pass
  // manager now that all the updates have been applied.
  if (RC != &InitialRC)
    UR.UpdatedRC = RC;
  if (C != &InitialC)
    UR.UpdatedC = C;

  return *C;
}
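
// (Editor's note: the following sketch is not part of the original file.) A
// CGSCC pass that rewrites the body of a function in the current SCC is
// expected to feed the change back through the helper above, roughly as the
// inliner does; simplifyBody is a hypothetical transformation:
//
//   Function &F = N.getFunction();
//   bool Changed = simplifyBody(F);
//   if (Changed)
//     // Reflect F's new call/ref edges in the graph and pick up the
//     // (possibly new) current SCC before continuing.
//     C = &updateCGAndAnalysisManagerForFunctionPass(CG, *C, N, AM, UR);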