81 cl::desc(
"Convert noalias attributes to metadata during inlining."));
86 cl::desc(
"Convert align attributes to assumptions during inlining."));
90 bool InsertLifetime) {
96 bool InsertLifetime) {
103 class LandingPadInliningInfo {
114 PHINode *InnerEHValuesPHI =
nullptr;
126 for (; isa<PHINode>(
I); ++
I) {
132 CallerLPad = cast<LandingPadInst>(
I);
138 return OuterResumeDest;
155 void addIncomingPHIValuesFor(
BasicBlock *BB)
const {
156 addIncomingPHIValuesForInto(BB, OuterResumeDest);
161 for (
unsigned i = 0, e = UnwindDestPHIValues.
size(); i != e; ++i, ++
I) {
171 BasicBlock *LandingPadInliningInfo::getInnerResumeDest() {
172 if (InnerResumeDest)
return InnerResumeDest;
177 OuterResumeDest->splitBasicBlock(SplitPoint,
178 OuterResumeDest->getName() +
".body");
181 const unsigned PHICapacity = 2;
184 Instruction *InsertPoint = &InnerResumeDest->front();
186 for (
unsigned i = 0, e = UnwindDestPHIValues.size(); i != e; ++i, ++
I) {
187 PHINode *OuterPHI = cast<PHINode>(
I);
189 OuterPHI->
getName() +
".lpad-body",
192 InnerPHI->addIncoming(OuterPHI, OuterResumeDest);
197 "eh.lpad-body", InsertPoint);
199 InnerEHValuesPHI->addIncoming(CallerLPad, OuterResumeDest);
202 return InnerResumeDest;
209 void LandingPadInliningInfo::forwardResume(
218 addIncomingPHIValuesForInto(Src, Dest);
220 InnerEHValuesPHI->addIncoming(RI->
getOperand(0), Src);
226 if (
auto *FPI = dyn_cast<FuncletPadInst>(EHPad))
227 return FPI->getParentPad();
239 while (!Worklist.
empty()) {
246 Value *UnwindDestToken =
nullptr;
247 if (
auto *CatchSwitch = dyn_cast<CatchSwitchInst>(CurrentPad)) {
248 if (CatchSwitch->hasUnwindDest()) {
249 UnwindDestToken = CatchSwitch->getUnwindDest()->getFirstNonPHI();
257 for (
auto HI = CatchSwitch->handler_begin(),
258 HE = CatchSwitch->handler_end();
259 HI != HE && !UnwindDestToken; ++
HI) {
261 auto *CatchPad = cast<CatchPadInst>(HandlerBlock->
getFirstNonPHI());
267 if (!isa<CleanupPadInst>(Child) && !isa<CatchSwitchInst>(Child))
271 auto Memo = MemoMap.
find(ChildPad);
272 if (Memo == MemoMap.
end()) {
279 Value *ChildUnwindDestToken = Memo->second;
280 if (!ChildUnwindDestToken)
286 if (isa<ConstantTokenNone>(ChildUnwindDestToken)) {
287 UnwindDestToken = ChildUnwindDestToken;
295 auto *CleanupPad = cast<CleanupPadInst>(CurrentPad);
297 if (
auto *CleanupRet = dyn_cast<CleanupReturnInst>(U)) {
298 if (
BasicBlock *RetUnwindDest = CleanupRet->getUnwindDest())
299 UnwindDestToken = RetUnwindDest->getFirstNonPHI();
304 Value *ChildUnwindDestToken;
305 if (
auto *Invoke = dyn_cast<InvokeInst>(U)) {
306 ChildUnwindDestToken = Invoke->getUnwindDest()->getFirstNonPHI();
307 }
else if (isa<CleanupPadInst>(U) || isa<CatchSwitchInst>(U)) {
309 auto Memo = MemoMap.
find(ChildPad);
310 if (Memo == MemoMap.
end()) {
317 ChildUnwindDestToken = Memo->second;
318 if (!ChildUnwindDestToken)
327 if (isa<Instruction>(ChildUnwindDestToken) &&
330 UnwindDestToken = ChildUnwindDestToken;
336 if (!UnwindDestToken)
344 if (
auto *UnwindPad = dyn_cast<Instruction>(UnwindDestToken))
347 UnwindParent =
nullptr;
348 bool ExitedOriginalPad =
false;
350 ExitedPad && ExitedPad != UnwindParent;
353 if (isa<CatchPadInst>(ExitedPad))
355 MemoMap[ExitedPad] = UnwindDestToken;
356 ExitedOriginalPad |= (ExitedPad == EHPad);
359 if (ExitedOriginalPad)
360 return UnwindDestToken;
391 if (
auto *CPI = dyn_cast<CatchPadInst>(EHPad))
392 EHPad = CPI->getCatchSwitch();
395 auto Memo = MemoMap.
find(EHPad);
396 if (Memo != MemoMap.
end())
401 assert((UnwindDestToken ==
nullptr) != (MemoMap.
count(EHPad) != 0));
403 return UnwindDestToken;
410 MemoMap[EHPad] =
nullptr;
416 Value *AncestorToken;
421 if (isa<CatchPadInst>(AncestorPad))
430 assert(!MemoMap.
count(AncestorPad) || MemoMap[AncestorPad]);
431 auto AncestorMemo = MemoMap.
find(AncestorPad);
432 if (AncestorMemo == MemoMap.
end()) {
435 UnwindDestToken = AncestorMemo->second;
439 LastUselessPad = AncestorPad;
440 MemoMap[LastUselessPad] =
nullptr;
442 TempMemos.
insert(LastUselessPad);
460 while (!Worklist.
empty()) {
462 auto Memo = MemoMap.
find(UselessPad);
463 if (Memo != MemoMap.
end() && Memo->second) {
491 MemoMap[UselessPad] = UnwindDestToken;
492 if (
auto *CatchSwitch = dyn_cast<CatchSwitchInst>(UselessPad)) {
493 assert(CatchSwitch->getUnwindDest() ==
nullptr &&
"Expected useless pad");
494 for (
BasicBlock *HandlerBlock : CatchSwitch->handlers()) {
495 auto *CatchPad = HandlerBlock->getFirstNonPHI();
498 (!isa<InvokeInst>(U) ||
500 cast<InvokeInst>(U)->getUnwindDest()->getFirstNonPHI()) ==
502 "Expected useless pad");
503 if (isa<CatchSwitchInst>(U) || isa<CleanupPadInst>(U))
504 Worklist.
push_back(cast<Instruction>(U));
508 assert(isa<CleanupPadInst>(UselessPad));
510 assert(!isa<CleanupReturnInst>(U) &&
"Expected useless pad");
511 assert((!isa<InvokeInst>(U) ||
513 cast<InvokeInst>(U)->getUnwindDest()->getFirstNonPHI()) ==
515 "Expected useless pad");
516 if (isa<CatchSwitchInst>(U) || isa<CleanupPadInst>(U))
517 Worklist.
push_back(cast<Instruction>(U));
522 return UnwindDestToken;
562 auto *FuncletPad = cast<Instruction>(FuncletBundle->Inputs[0]);
563 Value *UnwindDestToken =
565 if (UnwindDestToken && !isa<ConstantTokenNone>(UnwindDestToken))
569 if (
auto *CatchPad = dyn_cast<CatchPadInst>(FuncletPad))
570 MemoKey = CatchPad->getCatchSwitch();
572 MemoKey = FuncletPad;
573 assert(FuncletUnwindMap->count(MemoKey) &&
574 (*FuncletUnwindMap)[MemoKey] == UnwindDestToken &&
575 "must get memoized to avoid confusing later searches");
600 LandingPadInliningInfo Invoke(II);
606 if (
InvokeInst *II = dyn_cast<InvokeInst>(
I->getTerminator()))
614 InlinedLPad->reserveClauses(OuterNum);
615 for (
unsigned OuterIdx = 0; OuterIdx != OuterNum; ++OuterIdx)
616 InlinedLPad->addClause(OuterLPad->
getClause(OuterIdx));
618 InlinedLPad->setCleanup(
true);
625 &*BB, Invoke.getOuterResumeDest()))
628 Invoke.addIncomingPHIValuesFor(NewBB);
631 if (
ResumeInst *RI = dyn_cast<ResumeInst>(BB->getTerminator()))
632 Invoke.forwardResume(RI, InlinedLPads);
672 for (
Value *V : UnwindDestPHIValues) {
684 if (
auto *CRI = dyn_cast<CleanupReturnInst>(BB->getTerminator())) {
685 if (CRI->unwindsToCaller()) {
686 auto *CleanupPad = CRI->getCleanupPad();
688 CRI->eraseFromParent();
695 isa<ConstantTokenNone>(FuncletUnwindMap[CleanupPad]));
696 FuncletUnwindMap[CleanupPad] =
706 if (
auto *CatchSwitch = dyn_cast<CatchSwitchInst>(I)) {
707 if (CatchSwitch->unwindsToCaller()) {
708 Value *UnwindDestToken;
709 if (
auto *ParentPad =
710 dyn_cast<Instruction>(CatchSwitch->getParentPad())) {
720 if (UnwindDestToken && !isa<ConstantTokenNone>(UnwindDestToken))
733 CatchSwitch->getParentPad(), UnwindDest,
734 CatchSwitch->getNumHandlers(), CatchSwitch->getName(),
736 for (
BasicBlock *PadBB : CatchSwitch->handlers())
742 FuncletUnwindMap[NewCatchSwitch] = UnwindDestToken;
743 Replacement = NewCatchSwitch;
745 }
else if (!isa<FuncletPadInst>(I)) {
751 I->replaceAllUsesWith(Replacement);
752 I->eraseFromParent();
762 &*BB, UnwindDest, &FuncletUnwindMap))
771 UnwindDest->removePredecessor(InvokeBB);
783 if (!M && !CallAccessGroup)
787 VMI != VMIE; ++VMI) {
843 while (!Queue.empty()) {
844 const MDNode *M = cast<MDNode>(Queue.pop_back_val());
855 for (
const MDNode *I : MD) {
857 MDMap[
I].reset(DummyNodes.
back().get());
863 for (
const MDNode *I : MD) {
865 for (
unsigned i = 0, ie = I->getNumOperands(); i != ie; ++i) {
866 const Metadata *V = I->getOperand(i);
867 if (
const MDNode *M = dyn_cast<MDNode>(V))
870 NewOps.
push_back(const_cast<Metadata *>(V));
874 MDTuple *TempM = cast<MDTuple>(MDMap[
I]);
875 assert(TempM->isTemporary() &&
"Expected temporary node");
877 TempM->replaceAllUsesWith(NewM);
883 VMI != VMIE; ++VMI) {
938 if (NoAliasArgs.
empty())
944 DT.
recalculate(const_cast<Function&>(*CalledFunc));
957 MDB.createAnonymousAliasScopeDomain(CalledFunc->
getName());
958 for (
unsigned i = 0, e = NoAliasArgs.
size(); i != e; ++i) {
966 Name +=
": argument ";
973 MDNode *NewScope = MDB.createAnonymousAliasScope(NewDomain, Name);
974 NewScopes.
insert(std::make_pair(A, NewScope));
980 VMI != VMIE; ++VMI) {
981 if (
const Instruction *
I = dyn_cast<Instruction>(VMI->first)) {
989 bool IsArgMemOnlyCall =
false, IsFuncCall =
false;
992 if (
const LoadInst *LI = dyn_cast<LoadInst>(
I))
993 PtrArgs.
push_back(LI->getPointerOperand());
996 else if (
const VAArgInst *VAAI = dyn_cast<VAArgInst>(
I))
997 PtrArgs.
push_back(VAAI->getPointerOperand());
999 PtrArgs.
push_back(CXI->getPointerOperand());
1001 PtrArgs.
push_back(RMWI->getPointerOperand());
1002 else if (
const auto *Call = dyn_cast<CallBase>(
I)) {
1006 if (Call->doesNotAccessMemory())
1014 IsArgMemOnlyCall =
true;
1034 if (PtrArgs.
empty() && !IsFuncCall)
1044 for (
const Value *V : PtrArgs) {
1047 Objects, DL,
nullptr);
1055 bool CanDeriveViaCapture =
false, UsesAliasingPtr =
false;
1056 for (
const Value *V : ObjSet) {
1060 bool IsNonPtrConst = isa<ConstantInt>(V) || isa<ConstantFP>(V) ||
1061 isa<ConstantPointerNull>(V) ||
1062 isa<ConstantDataVector>(V) || isa<UndefValue>(V);
1069 if (
const Argument *A = dyn_cast<Argument>(V)) {
1070 if (!A->hasNoAliasAttr())
1071 UsesAliasingPtr =
true;
1073 UsesAliasingPtr =
true;
1080 if (!isa<Argument>(V) &&
1082 CanDeriveViaCapture =
true;
1087 if (IsFuncCall && !IsArgMemOnlyCall)
1088 CanDeriveViaCapture =
true;
1098 for (
const Argument *A : NoAliasArgs) {
1099 if (!ObjSet.count(A) && (!CanDeriveViaCapture ||
1112 if (!NoAliases.
empty())
1128 bool CanAddScopes = !UsesAliasingPtr;
1129 if (CanAddScopes && IsFuncCall)
1130 CanAddScopes = IsArgMemOnlyCall;
1133 for (
const Argument *A : NoAliasArgs) {
1134 if (ObjSet.count(A))
1138 if (!Scopes.
empty())
1159 bool DTCalculated =
false;
1164 if (Align && !
Arg.hasByValOrInAllocaAttr() && !
Arg.
hasNUses(0)) {
1165 if (!DTCalculated) {
1167 DTCalculated =
true;
1177 .CreateAlignmentAssumption(DL, ArgVal, Align);
1203 if (CalleeNode == CallerNode) {
1204 CallCache.assign(I,
E);
1205 I = CallCache.begin();
1206 E = CallCache.end();
1209 for (; I !=
E; ++
I) {
1210 const Value *OrigCall = I->first;
1214 if (VMI == VMap.
end() || VMI->
second ==
nullptr)
1238 if (!I->second->getFunction())
1257 Type *AggTy = cast<PointerType>(Src->
getType())->getElementType();
1265 Builder.CreateMemCpy(Dst, 1, Src, 1, Size);
1273 unsigned ByValAlignment) {
1287 if (ByValAlignment <= 1)
1309 Align =
std::max(Align, ByValAlignment);
1313 &*Caller->
begin()->begin());
1325 if (II->isLifetimeStartOrEnd())
1336 if (Ty == Int8PtrTy)
1341 if (U->getType() != Int8PtrTy)
continue;
1342 if (U->stripPointerCasts() != AI)
continue;
1370 Ctx, InlinedAtNode->getLine(), InlinedAtNode->getColumn(),
1371 InlinedAtNode->getScope(), InlinedAtNode->getInlinedAt());
1378 for (; FI != Fn->
end(); ++FI) {
1381 if (
DebugLoc DL = BI->getDebugLoc()) {
1384 auto IDL =
DebugLoc::get(DL.getLine(), DL.getCol(), DL.getScope(), IA);
1385 BI->setDebugLoc(IDL);
1389 if (CalleeHasDebugInfo)
1398 if (
auto *AI = dyn_cast<AllocaInst>(BI))
1402 BI->setDebugLoc(TheCallDL);
1419 for (
auto const &Entry : VMap) {
1420 if (!isa<BasicBlock>(Entry.first) || !Entry.second)
1422 auto *OrigBB = cast<BasicBlock>(Entry.first);
1423 auto *ClonedBB = cast<BasicBlock>(Entry.second);
1425 if (!ClonedBBs.
insert(ClonedBB).second) {
1435 BasicBlock *EntryClone = cast<BasicBlock>(VMap.lookup(&CalleeEntryBlock));
1451 uint64_t CallCount =
1452 std::min(CallSiteCount.hasValue() ? CallSiteCount.getValue() : 0,
1455 for (
auto const &Entry : VMap)
1456 if (isa<CallInst>(Entry.first))
1457 if (
auto *CI = dyn_cast_or_null<CallInst>(Entry.second))
1458 CI->updateProfWeight(CallCount, CalleeEntryCount.
getCount());
1461 if (VMap.count(&BB))
1463 if (
CallInst *CI = dyn_cast<CallInst>(&
I))
1464 CI->updateProfWeight(CalleeEntryCount.
getCount() - CallCount,
1480 if (!CalleeCount.hasValue() || !PSI)
1483 if (!CallCount.hasValue())
1487 if (CallCount.getValue() > CalleeCount.getCount())
1488 CalleeCount.setCount(0);
1490 CalleeCount.setCount(CalleeCount.getCount() - CallCount.getValue());
1504 bool InsertLifetime,
1508 &&
"Instruction not in function!");
1516 return "external or indirect";
1530 return "unsupported operand bundle";
1545 if (CalledFunc->
hasGC()) {
1546 if (!Caller->
hasGC())
1548 else if (CalledFunc->
getGC() != Caller->
getGC())
1549 return "incompatible GC";
1565 if (CalledPersonality) {
1566 if (!CallerPersonality)
1572 else if (CalledPersonality != CallerPersonality)
1573 return "incompatible personality";
1579 if (CallerPersonality) {
1585 CallSiteEHPad = cast<FuncletPadInst>(ParentFunclet->
Inputs.front());
1589 if (CallSiteEHPad) {
1593 if (isa<CleanupPadInst>(CallSiteEHPad)) {
1596 for (
const BasicBlock &CalledBB : *CalledFunc) {
1597 if (isa<CatchSwitchInst>(CalledBB.getFirstNonPHI()))
1598 return "catch in cleanup funclet";
1604 for (
const BasicBlock &CalledBB : *CalledFunc) {
1605 if (CalledBB.isEHPad())
1606 return "SEH in cleanup funclet";
1615 bool EHPadForCallUnwindsLocally =
false;
1616 if (CallSiteEHPad && CS.
isCall()) {
1618 Value *CallSiteUnwindDestToken =
1621 EHPadForCallUnwindsLocally =
1622 CallSiteUnwindDestToken &&
1623 !isa<ConstantTokenNone>(CallSiteUnwindDestToken);
1648 E = CalledFunc->
arg_end();
I !=
E; ++
I, ++AI, ++ArgNo) {
1649 Value *ActualArg = *AI;
1658 if (ActualArg != *AI)
1662 VMap[&*
I] = ActualArg;
1675 false, Returns,
".i",
1676 &InlinedFunctionInfo, TheCall);
1678 FirstNewBlock = LastBlock; ++FirstNewBlock;
1683 CalledFunc->
front());
1691 for (std::pair<Value*, Value*> &
Init : ByValInit)
1693 &*FirstNewBlock, IFI);
1721 std::vector<Value *> MergedDeoptArgs;
1722 MergedDeoptArgs.reserve(ParentDeopt->Inputs.size() +
1723 ChildOB.Inputs.size());
1725 MergedDeoptArgs.insert(MergedDeoptArgs.end(),
1726 ParentDeopt->Inputs.begin(),
1727 ParentDeopt->Inputs.end());
1728 MergedDeoptArgs.insert(MergedDeoptArgs.end(), ChildOB.Inputs.begin(),
1729 ChildOB.Inputs.end());
1731 OpDefs.
emplace_back(
"deopt", std::move(MergedDeoptArgs));
1735 if (isa<CallInst>(I))
1773 if (
auto *II = dyn_cast<IntrinsicInst>(&
I))
1786 E = FirstNewBlock->end();
I !=
E; ) {
1805 while (isa<AllocaInst>(
I) &&
1815 InsertPoint, FirstNewBlock->getInstList(), AI->
getIterator(),
I);
1832 bool InlinedMustTailCalls =
false, InlinedDeoptimizeCalls =
false;
1835 if (
CallInst *CI = dyn_cast<CallInst>(TheCall))
1836 CallSiteTailKind = CI->getTailCallKind();
1844 for (
auto II = BB->begin(); II != BB->end();) {
1852 if (!VarArgsToForward.
empty() &&
1853 ((ForwardVarArgsTo &&
1860 for (
unsigned ArgNo = 0;
1885 InlinedDeoptimizeCalls |=
1905 ChildTCK = std::min(CallSiteTailKind, ChildTCK);
1921 for (
unsigned ai = 0, ae = IFI.
StaticAllocas.size(); ai != ae; ++ai) {
1938 uint64_t AllocaTypeSize = DL.getTypeAllocSize(AllocaType);
1939 uint64_t AllocaArraySize = AIArraySize->getLimitedValue();
1942 if (AllocaArraySize == 0)
1951 AllocaArraySize * AllocaTypeSize);
1955 builder.CreateLifetimeStart(AI, AllocaSize);
1959 if (InlinedMustTailCalls &&
1962 if (InlinedDeoptimizeCalls &&
1965 IRBuilder<>(RI).CreateLifetimeEnd(AI, AllocaSize);
1980 .CreateCall(StackSave, {},
"savedstack");
1991 IRBuilder<>(RI).CreateCall(StackRestore, SavedPtr);
1999 if (
auto *II = dyn_cast<InvokeInst>(TheCall)) {
2000 BasicBlock *UnwindDest = II->getUnwindDest();
2002 if (isa<LandingPadInst>(FirstNonPHI)) {
2013 if (CallSiteEHPad) {
2028 if (CalledFn && CalledFn->isIntrinsic() && CS.
doesNotThrow())
2054 if (
auto *CleanupRet = dyn_cast<CleanupReturnInst>(BB->getTerminator()))
2055 if (CleanupRet->unwindsToCaller() && EHPadForCallUnwindsLocally)
2062 if (
auto *CatchSwitch = dyn_cast<CatchSwitchInst>(I)) {
2063 if (isa<ConstantTokenNone>(CatchSwitch->getParentPad()))
2064 CatchSwitch->setParentPad(CallSiteEHPad);
2066 auto *FPI = cast<FuncletPadInst>(
I);
2067 if (isa<ConstantTokenNone>(FPI->getParentPad()))
2068 FPI->setParentPad(CallSiteEHPad);
2073 if (InlinedDeoptimizeCalls) {
2083 Returns.
erase(NewEnd, Returns.
end());
2114 "Expected at least the deopt operand bundle");
2118 Builder.CreateCall(NewDeoptIntrinsic, CallArgs, OpBundles);
2121 Builder.CreateRetVoid();
2123 Builder.CreateRet(NewDeoptCall);
2135 if (InlinedMustTailCalls) {
2138 bool NeedBitCast = !TheCall->
use_empty() && TheCall->
getType() != NewRetTy;
2145 if (!ReturnedMustTail) {
2154 auto *OldCast = dyn_cast_or_null<BitCastInst>(RI->getReturnValue());
2157 OldCast->eraseFromParent();
2185 if (Returns.
size() == 1 && std::distance(FirstNewBlock, Caller->
end()) == 1) {
2188 FirstNewBlock->getInstList(),
2189 FirstNewBlock->begin(), FirstNewBlock->end());
2195 if (
InvokeInst *II = dyn_cast<InvokeInst>(TheCall)) {
2213 Returns[0]->eraseFromParent();
2226 BranchInst *CreatedBranchToNormalDest =
nullptr;
2227 if (
InvokeInst *II = dyn_cast<InvokeInst>(TheCall)) {
2237 CalledFunc->
getName() +
".exit");
2244 CalledFunc->
getName() +
".exit");
2258 "splitBasicBlock broken!");
2273 if (Returns.
size() > 1) {
2278 &AfterCallBB->
front());
2287 for (
unsigned i = 0, e = Returns.
size(); i != e; ++i) {
2290 "Ret value not consistent in function!");
2297 for (
unsigned i = 0, e = Returns.
size(); i != e; ++i) {
2308 if (CreatedBranchToNormalDest)
2310 }
else if (!Returns.
empty()) {
2314 if (TheCall == Returns[0]->getReturnValue())
2321 BasicBlock *ReturnBB = Returns[0]->getParent();
2329 if (CreatedBranchToNormalDest)
2333 Returns[0]->eraseFromParent();
2351 assert(cast<BranchInst>(Br)->isUnconditional() &&
"splitBasicBlock broken!");
2352 BasicBlock *CalleeEntry = cast<BranchInst>(Br)->getSuccessor(0);
bool onlyReadsMemory() const
Determine if the function does not access or only reads memory.
Return a value (possibly void), from a function.
bool isIntrinsic() const
isIntrinsic - Returns true if the function's name starts with "llvm.".
SymbolTableList< Instruction >::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
A parsed version of the target data layout string in and methods for querying it. ...
constexpr char Align[]
Key for Kernel::Arg::Metadata::mAlign.
void removePredecessor(BasicBlock *Pred, bool DontDeleteUselessPHIs=false)
Notify the BasicBlock that the predecessor Pred is no longer able to reach it.
DILocation * get() const
Get the underlying DILocation.
void addIncoming(Value *V, BasicBlock *BB)
Add an incoming value to the end of the PHI list.
static cl::opt< bool > EnableNoAliasConversion("enable-noalias-to-md-conversion", cl::init(true), cl::Hidden, cl::desc("Convert noalias attributes to metadata during inlining."))
unsigned getOrEnforceKnownAlignment(Value *V, unsigned PrefAlign, const DataLayout &DL, const Instruction *CxtI=nullptr, AssumptionCache *AC=nullptr, const DominatorTree *DT=nullptr)
Try to ensure that the alignment of V is at least PrefAlign bytes.
static void updateCallProfile(Function *Callee, const ValueToValueMapTy &VMap, const ProfileCount &CalleeEntryCount, const Instruction *TheCall, ProfileSummaryInfo *PSI, BlockFrequencyInfo *CallerBFI)
Update the branch metadata for cloned call instructions.
GCNRegPressure max(const GCNRegPressure &P1, const GCNRegPressure &P2)
This class represents an incoming formal argument to a Function.
bool replaceDbgDeclareForAlloca(AllocaInst *AI, Value *NewAllocaAddress, DIBuilder &Builder, bool DerefBefore, int Offset, bool DerefAfter)
Replaces llvm.dbg.declare instruction when the alloca it describes is replaced with a new value...
iterator erase(iterator where)
This class represents lattice values for constants.
CallGraph * CG
If non-null, InlineFunction will update the callgraph to reflect the changes it makes.
A Module instance is used to store all the information related to an LLVM module. ...
an instruction that atomically checks whether a specified value is in a memory location, and, if it is, stores a new value there.
static DebugLoc appendInlinedAt(DebugLoc DL, DILocation *InlinedAt, LLVMContext &Ctx, DenseMap< const MDNode *, MDNode *> &Cache, bool ReplaceLast=false)
Rebuild the entire inlined-at chain for this instruction so that the top of the chain now is inlined-...
static CallInst * Create(FunctionType *Ty, Value *F, const Twine &NameStr="", Instruction *InsertBefore=nullptr)
std::function< AssumptionCache &(Function &)> * GetAssumptionCache
void push_back(const T &Elt)
void recalculate(ParentType &Func)
recalculate - compute a dominator tree for the given function
This provides a very simple, boring adaptor for a begin and end iterator into a range type...
unsigned getParamAlignment(unsigned ArgNo) const
Extract the alignment for a call or parameter (0=unknown).
Analysis providing profile information.
const CallInst * getTerminatingMustTailCall() const
Returns the call instruction marked 'musttail' prior to the terminating return instruction of this ba...
This class represents a function call, abstracting a target machine's calling convention.
void setGC(std::string Str)
static void updateCalleeCount(BlockFrequencyInfo *CallerBFI, BasicBlock *CallBB, Instruction *CallInst, Function *Callee, ProfileSummaryInfo *PSI)
Update the entry count of callee after inlining.
A cache of @llvm.assume calls within a function.
bool isSwiftError() const
Return true if this alloca is used as a swifterror argument to a call.
uint64_t getFrequency() const
Returns the frequency as a fixpoint number scaled by the entry frequency.
LLVMContext & getContext() const
All values hold a context through their type.
static void AddAlignmentAssumptions(CallSite CS, InlineFunctionInfo &IFI)
If the inlined function has non-byval align arguments, then add .assume-based alignment assumptions t...
Optional< uint64_t > getProfileCount(const Instruction *CallInst, BlockFrequencyInfo *BFI)
Returns the profile count for CallInst.
User::op_iterator arg_end()
Return the iterator pointing to the end of the argument list.
const MDOperand & getOperand(unsigned I) const
unsigned getPointerAddressSpace() const
Get the address space of this pointer or pointer vector type.
An instruction for reading from memory.
InlineResult InlineFunction(CallInst *C, InlineFunctionInfo &IFI, AAResults *CalleeAAR=nullptr, bool InsertLifetime=true)
This function inlines the called function into the basic block of the caller.
static void AddAliasScopeMetadata(CallSite CS, ValueToValueMapTy &VMap, const DataLayout &DL, AAResults *CalleeAAR)
If the inlined function has noalias arguments, then add new alias scopes for each noalias argument...
static IntegerType * getInt64Ty(LLVMContext &C)
an instruction that atomically reads a memory location, combines it with another value, and then stores the result back.
const Instruction * getTerminator() const LLVM_READONLY
Returns the terminator instruction if the block is well formed or null if the block is not well forme...
Constant * getClause(unsigned Idx) const
Get the value of the clause at index Idx.
void reserve(size_type N)
bool isMustTailCall() const
This class captures the data input to the InlineFunction call, and records the auxiliary results prod...
iterator end()
Get an iterator to the end of the SetVector.
A node in the call graph for a module.
LLVMContext & getContext() const
Return the LLVMContext in which this type was uniqued.
static void fixupLineNumbers(Function *Fn, Function::iterator FI, Instruction *TheCall, bool CalleeHasDebugInfo)
Update inlined instructions' line numbers to to encode location where these instructions are inlined...
A templated base class for SmallPtrSet which provides the typesafe interface that is common across al...
iterator begin()
Instruction iterator methods.
Function::ProfileCount ProfileCount
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
void addCalledFunction(CallSite CS, CallGraphNode *M)
Adds a function to the list of functions called by this one.
unsigned getAllocaAddrSpace() const
amdgpu Simplify well known AMD library false Value Value const Twine & Name
const CallInst * getTerminatingDeoptimizeCall() const
Returns the call instruction calling @llvm.experimental.deoptimize prior to the terminating return in...
unsigned changeToUnreachable(Instruction *I, bool UseLLVMTrap, bool PreserveLCSSA=false, DomTreeUpdater *DTU=nullptr)
Insert an unreachable instruction before the specified instruction, making it and the rest of the cod...
const DataLayout & getDataLayout() const
Get the data layout for the module's target platform.
ProfileCount getEntryCount() const
Get the entry count for this function.
void setEntryCount(ProfileCount Count, const DenseSet< GlobalValue::GUID > *Imports=nullptr)
Set the entry count for this function.
static void HandleInlinedLandingPad(InvokeInst *II, BasicBlock *FirstNewBlock, ClonedCodeInfo &InlinedCodeInfo)
If we inlined an invoke site, we need to convert calls in the body of the inlined function into invok...
PointerType * getType() const
Overload to return most specific pointer type.
TailCallKind getTailCallKind() const
A Use represents the edge between a Value definition and its users.
uint32_t getTagID() const
Return the tag of this operand bundle as an integer.
static void updateCallerBFI(BasicBlock *CallSiteBlock, const ValueToValueMapTy &VMap, BlockFrequencyInfo *CallerBFI, BlockFrequencyInfo *CalleeBFI, const BasicBlock &CalleeEntryBlock)
Update the block frequencies of the caller after a callee has been inlined.
ReturnInst * CreateRet(Value *V)
Create a 'ret <val>' instruction.
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
AttributeSet getRetAttributes() const
The attributes for the ret value are returned.
InstrTy * getInstruction() const
static void CloneAliasScopeMetadata(CallSite CS, ValueToValueMapTy &VMap)
When inlining a function that contains noalias scope metadata, this metadata needs to be cloned so th...
The only memory references in this function (if it has any) are non-volatile loads from objects point...
void setBlockFreqAndScale(const BasicBlock *ReferenceBB, uint64_t Freq, SmallPtrSetImpl< BasicBlock *> &BlocksToScale)
Set the frequency of ReferenceBB to Freq and scale the frequencies of the blocks in BlocksToScale suc...
unsigned getNumClauses() const
Get the number of clauses for this landing pad.
std::vector< CallRecord > CalledFunctionsVector
bool isScopedEHPersonality(EHPersonality Pers)
Returns true if this personality uses scope-style EH IR instructions: catchswitch, catchpad/ret, and cleanuppad/ret.
FunctionModRefBehavior getModRefBehavior(const CallBase *Call)
Return the behavior of the given call site.
void addHandler(BasicBlock *Dest)
Add an entry to the switch instruction...
ValTy * getCalledValue() const
Return the pointer to function that is being called.
This file provides interfaces used to build and manipulate a call graph, which is a very useful tool ...
bool doesNotThrow() const
Determine if the call cannot unwind.
static void UpdateCallGraphAfterInlining(CallSite CS, Function::iterator FirstNewBlock, ValueToValueMapTy &VMap, InlineFunctionInfo &IFI)
Once we have cloned code over from a callee into the caller, update the specified callgraph to reflec...
void CloneAndPruneFunctionInto(Function *NewFunc, const Function *OldFunc, ValueToValueMapTy &VMap, bool ModuleLevelChanges, SmallVectorImpl< ReturnInst *> &Returns, const char *NameSuffix="", ClonedCodeInfo *CodeInfo=nullptr, Instruction *TheCall=nullptr)
This works exactly like CloneFunctionInto, except that it does some simple constant prop and DCE on t...
Value * CreateBitCast(Value *V, Type *DestTy, const Twine &Name="")
static void UpdatePHINodes(BasicBlock *OrigBB, BasicBlock *NewBB, ArrayRef< BasicBlock *> Preds, BranchInst *BI, bool HasLoopExit)
Update the PHI nodes in OrigBB to include the values coming from NewBB.
Type * getType() const
All values are typed, get the type of this value.
std::vector< WeakTrackingVH > OperandBundleCallSites
All cloned call sites that have operand bundles attached are appended to this vector.
bool insert(const value_type &X)
Insert a new element into the SetVector.
LandingPadInst * getLandingPadInst() const
Get the landingpad instruction from the landing pad block (the unwind destination).
ValTy * getArgOperand(unsigned i) const
LLVMContext & getContext() const
FunctionModRefBehavior
Summary of how a function affects memory in the program.
AttributeSet getParamAttributes(unsigned ArgNo) const
The attributes for the argument or parameter at the given index are returned.
bool isUsedWithInAlloca() const
Return true if this alloca is used as an inalloca argument to a call.
iterator find(const KeyT &Val)
iterator begin()
Get an iterator to the beginning of the SetVector.
BlockFrequencyInfo * CalleeBFI
MDNode * getMetadata(unsigned KindID) const
Get the metadata of given kind attached to this Instruction.
unsigned getOpcode() const
Returns a member of one of the enums like Instruction::Add.
iterator_range< User::op_iterator > arg_operands()
const std::string & getGC() const
An instruction for storing to memory.
bool hasPersonalityFn() const
Check whether this function has a personality function.
void replaceAllUsesWith(Value *V)
Change all uses of this to point to a new Value.
bool PointerMayBeCapturedBefore(const Value *V, bool ReturnCaptures, bool StoreCaptures, const Instruction *I, const DominatorTree *DT, bool IncludeI=false, OrderedBasicBlock *OBB=nullptr, unsigned MaxUsesToExplore=DefaultMaxUsesToExplore)
PointerMayBeCapturedBefore - Return true if this pointer value may be captured by the enclosing funct...
constexpr char Attrs[]
Key for Kernel::Metadata::mAttrs.
void takeName(Value *V)
Transfer the name from V to this value.
InlineResult is basically true or false.
Concrete subclass of DominatorTreeBase that is used to compute a normal dominator tree...
amdgpu Simplify well known AMD library false Value * Callee
Function * getDeclaration(Module *M, ID id, ArrayRef< Type *> Tys=None)
Create or insert an LLVM Function declaration for an intrinsic, and return it.
Value * getOperand(unsigned i) const
Class to represent pointers.
Optional< OperandBundleUse > getOperandBundle(StringRef Name) const
Return an operand bundle by name, if present.
static BasicBlock * HandleCallsInBlockInlinedThroughInvoke(BasicBlock *BB, BasicBlock *UnwindEdge, UnwindDestMemoTy *FuncletUnwindMap=nullptr)
When we inline a basic block into an invoke, we have to turn all of the calls that can throw into inv...
bool isCall() const
Return true if a CallInst is enclosed.
static bool isUsedByLifetimeMarker(Value *V)
iterator find(const_arg_type_t< KeyT > Val)
static cl::opt< bool > PreserveAlignmentAssumptions("preserve-alignment-assumptions-during-inlining", cl::init(true), cl::Hidden, cl::desc("Convert align attributes to assumptions during inlining."))
Optional< OperandBundleUse > getOperandBundle(StringRef Name) const
bool isVoidTy() const
Return true if this is 'void'.
const BasicBlock & getEntryBlock() const
BlockFrequencyInfo pass uses BlockFrequencyInfoImpl implementation to estimate IR basic block frequen...
static TempMDTuple getTemporary(LLVMContext &Context, ArrayRef< Metadata *> MDs)
Return a temporary node.
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata *> MDs)
void setCallingConv(CallingConv::ID CC)
SmallVector< CallSite, 8 > InlinedCallSites
All of the new call sites inlined into the caller.
initializer< Ty > init(const Ty &Val)
Type * getReturnType() const
Returns the type of the ret val.
The landingpad instruction holds all of the information necessary to generate correct exception handl...
const Instruction * getFirstNonPHI() const
Returns a pointer to the first instruction in this block that is not a PHINode instruction.
bool hasNUses(unsigned N) const
Return true if this Value has exactly N users.
void setDebugLoc(DebugLoc Loc)
Set the debug location information for this instruction.
uint64_t getCount() const
Value * getCalledValue() const
LLVM Basic Block Representation.
The instances of the Type class are immutable: once they are created, they are never changed...
DISubprogram * getSubprogram() const
Get the attached subprogram.
Conditional or Unconditional Branch instruction.
static GCRegistry::Add< CoreCLRGC > E("coreclr", "CoreCLR-compatible GC")
This is an important base class in LLVM.
LLVM_ATTRIBUTE_ALWAYS_INLINE iterator begin()
Resume the propagation of an exception.
static MDTuple * getDistinct(LLVMContext &Context, ArrayRef< Metadata *> MDs)
Value * getIncomingValueForBlock(const BasicBlock *BB) const
This file contains the declarations for the subclasses of Constant, which represent the different flavors of constants.
bool isPointerTy() const
True if this is an instance of PointerType.
const Instruction & front() const
FunctionType * getFunctionType() const
unsigned getNumParams() const
Return the number of fixed parameters this function type requires.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
Interval::pred_iterator pred_begin(Interval *I)
pred_begin/pred_end - define methods so that Intervals may be used just like BasicBlocks can with the...
unsigned getPrefTypeAlignment(Type *Ty) const
Returns the preferred stack/global alignment for the specified type.
EHPersonality classifyEHPersonality(const Value *Pers)
See if the given exception handling personality function is one that we understand.
bool hasOperandBundles() const
void splice(iterator where, iplist_impl &L2)
static Value * getUnwindDestTokenHelper(Instruction *EHPad, UnwindDestMemoTy &MemoMap)
Helper for getUnwindDestToken that does the descendant-ward part of the search.
static bool hasLifetimeMarkers(AllocaInst *AI)
void setCallingConv(CallingConv::ID CC)
The only memory references in this function (if it has any) are non-volatile loads and stores from objects pointed to by its pointer-typed arguments, with arbitrary offsets.
Interval::pred_iterator pred_end(Interval *I)
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
self_iterator getIterator()
void setTailCallKind(TailCallKind TCK)
const Function * getFunction() const
Return the function this instruction belongs to.
auto remove_if(R &&Range, UnaryPredicate P) -> decltype(adl_begin(Range))
Provide wrappers to std::remove_if which take ranges instead of having to pass begin/end explicitly...
LLVMContext & getContext() const
getContext - Return a reference to the LLVMContext associated with this function. ...
static UndefValue * get(Type *T)
Static factory methods - Return an 'undef' object of the specified type.
MDNode * uniteAccessGroups(MDNode *AccGroups1, MDNode *AccGroups2)
Compute the union of two access-group lists.
const Constant * stripPointerCasts() const
const Value * stripPointerCasts() const
Strip off pointer casts, all-zero GEPs, and aliases.
This class represents the va_arg llvm instruction, which returns an argument of the specified type given a va_list and increments that list.
iterator erase(const_iterator CI)
unsigned getNumArgOperands() const
Class to represent profile counts.
const Value * getArraySize() const
Get the number of elements allocated.
static PointerType * getInt8PtrTy(LLVMContext &C, unsigned AS=0)
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
void setMetadata(unsigned KindID, MDNode *Node)
Set the metadata of the specified kind to the specified node.
Type * getAllocatedType() const
Return the type that is being allocated by the instruction.
bool isIdentifiedFunctionLocal(const Value *V)
Return true if V is unambiguously identified at the function-level.
iterator_range< T > make_range(T x, T y)
Convenience function for iterating over sub-ranges.
static void HandleByValArgumentInit(Value *Dst, Value *Src, Module *M, BasicBlock *InsertBlock, InlineFunctionInfo &IFI)
const InstListType & getInstList() const
Return the underlying instruction list container.
A SetVector that performs no allocations if smaller than a certain size.
Iterator for intrusive lists based on ilist_node.
This is the shared class of boolean and integer constants.
static CatchSwitchInst * Create(Value *ParentPad, BasicBlock *UnwindDest, unsigned NumHandlers, const Twine &NameStr="", Instruction *InsertBefore=nullptr)
ValTy * getArgument(unsigned ArgNo) const
OperandBundleUse getOperandBundleAt(unsigned Index) const
CallingConv::ID getCallingConv() const
getCallingConv()/setCallingConv(CC) - These method get and set the calling convention of this functio...
Module.h This file contains the declarations for the Module class.
void setBlockFreq(const BasicBlock *BB, uint64_t Freq)
LLVM_NODISCARD T pop_back_val()
static Value * getParentPad(Value *EHPad)
Helper for getUnwindDestToken/getUnwindDestTokenHelper.
void getOperandBundlesAsDefs(SmallVectorImpl< OperandBundleDef > &Defs) const
Return the list of operand bundles attached to this instruction as a vector of OperandBundleDefs.
static cl::opt< bool > NoAliases("riscv-no-aliases", cl::desc("Disable the emission of assembler pseudo instructions"), cl::init(false), cl::Hidden)
static Value * getUnwindDestToken(Instruction *EHPad, UnwindDestMemoTy &MemoMap)
Given an EH pad, find where it unwinds.
static Constant * get(Type *Ty, uint64_t V, bool isSigned=false)
If Ty is a vector type, return a Constant with a splat of the given value.
static BranchInst * Create(BasicBlock *IfTrue, Instruction *InsertBefore=nullptr)
static PHINode * Create(Type *Ty, unsigned NumReservedValues, const Twine &NameStr="", Instruction *InsertBefore=nullptr)
Constructors - NumReservedValues is a hint for the number of incoming edges that this phi node will have (use 0 if you really have no idea).
std::string utostr(uint64_t X, bool isNeg=false)
AttributeList getAttributes() const
Return the parameter attributes for this call.
BlockFrequency getBlockFreq(const BasicBlock *BB) const
getblockFreq - Return block frequency.
void setOperand(unsigned i, Value *Val)
void swap(llvm::BitVector &LHS, llvm::BitVector &RHS)
Implement std::swap in terms of BitVector swap.
FunctionType * getFunctionType() const
Returns the FunctionType for me.
bool isCleanup() const
Return 'true' if this landingpad instruction is a cleanup.
iterator_range< user_iterator > users()
void append(in_iter in_start, in_iter in_end)
Add the specified range to the end of the SmallVector.
amdgpu Simplify well known AMD library false Value Value * Arg
bool ContainsCalls
This is set to true if the cloned code contains a normal call instruction.
User::op_iterator arg_begin()
Return the iterator pointing to the beginning of the argument list.
This file provides various utilities for inspecting and working with the control flow graph in LLVM IR.
SmallVector< AllocaInst *, 4 > StaticAllocas
InlineFunction fills this in with all static allocas that get copied into the caller.
The basic data container for the call graph of a Module of IR.
LLVM_ATTRIBUTE_ALWAYS_INLINE iterator end()
FunTy * getCaller() const
Return the caller function for this call site.
bool hasGC() const
hasGC/getGC/setGC/clearGC - The name of the garbage collection algorithm to use during code generation.
const DebugLoc & getDebugLoc() const
Return the debug location for this node as a DebugLoc.
unsigned getNumOperandBundles() const
void registerAssumption(CallInst *CI)
Add an @llvm.assume intrinsic to this function's cache.
void emplace_back(ArgTypes &&... Args)
static DebugLoc getDebugLoc(MachineBasicBlock::instr_iterator FirstMI, MachineBasicBlock::instr_iterator LastMI)
Return the first found DebugLoc that has a DILocation, given a range of instructions.
LLVM_NODISCARD bool empty() const
SmallVector< WeakTrackingVH, 8 > InlinedCalls
InlineFunction fills this in with callsites that were inlined from the callee.
void getOperandBundlesAsDefs(SmallVectorImpl< OperandBundleDef > &Defs) const
static InvokeInst * Create(FunctionType *Ty, Value *Func, BasicBlock *IfNormal, BasicBlock *IfException, ArrayRef< Value *> Args, const Twine &NameStr, Instruction *InsertBefore=nullptr)
CallingConv::ID getCallingConv() const
StringRef getName() const
Return a constant reference to the value's name.
Function * getCalledFunction() const
Returns the function called, or null if this is an indirect function invocation.
const Function * getParent() const
Return the enclosing method, or null if none.
static MDNode * concatenate(MDNode *A, MDNode *B)
Methods for metadata merging.
static void PropagateParallelLoopAccessMetadata(CallSite CS, ValueToValueMapTy &VMap)
When inlining a call site that has !llvm.mem.parallel_loop_access or llvm.access.group metadata, that metadata should be propagated to all memory-accessing cloned instructions.
SymbolTableList< BasicBlock >::iterator eraseFromParent()
Unlink 'this' from the containing function and delete it.
bool empty() const
Determine if the SetVector is empty or not.
bool doesNotThrow() const
Determine if the call cannot unwind.
static Value * HandleByValArgument(Value *Arg, Instruction *TheCall, const Function *CalledFunc, InlineFunctionInfo &IFI, unsigned ByValAlignment)
When inlining a call site that has a byval argument, we have to make the implicit memcpy explicit by adding it.
This struct can be used to capture information about code being cloned, while it is being cloned...
void setAttributes(AttributeList A)
Set the parameter attributes for this call.
unsigned getKnownAlignment(Value *V, const DataLayout &DL, const Instruction *CxtI=nullptr, AssumptionCache *AC=nullptr, const DominatorTree *DT=nullptr)
Try to infer an alignment for the specified pointer.
LLVM_NODISCARD std::enable_if<!is_simple_type< Y >::value, typename cast_retty< X, const Y >::ret_type >::type dyn_cast(const Y &Val)
const BasicBlockListType & getBasicBlockList() const
Get the underlying elements of the Function...
static ConstantTokenNone * get(LLVMContext &Context)
Return the ConstantTokenNone.
BasicBlock * splitBasicBlock(iterator I, const Twine &BBName="")
Split the basic block into two basic blocks at the specified instruction.
Value * getReturnValue() const
Convenience accessor. Returns null if there is no return value.
std::vector< CallRecord >::iterator iterator
size_type count(const_arg_type_t< KeyT > Val) const
Return 1 if the specified key is in the map, 0 otherwise.
bool isByValArgument(unsigned ArgNo) const
Determine whether this argument is passed by value.
bool isDeclaration() const
Return true if the primary definition of this global value is outside of the current translation unit...
FunTy * getCalledFunction() const
Return the function being called if this is a direct call, otherwise return null (if it's an indirect call).
void GetUnderlyingObjects(Value *V, SmallVectorImpl< Value *> &Objects, const DataLayout &DL, LoopInfo *LI=nullptr, unsigned MaxLookup=6)
This method is similar to GetUnderlyingObject except that it can look through phi and select instructions.
static bool allocaWouldBeStaticInEntry(const AllocaInst *AI)
Return the result of AI->isStaticAlloca() if AI were moved to the entry block.
BlockFrequencyInfo * CallerBFI
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
bool ContainsDynamicAllocas
This is set to true if the cloned code contains a 'dynamic' alloca.
const BasicBlock & front() const
bool isAsynchronousEHPersonality(EHPersonality Pers)
Returns true if this personality function catches asynchronous exceptions.
BasicBlock * getUnwindDest() const
Module * getParent()
Get the module that this global value is contained inside of...
LLVM Value Representation.
Constant * getPersonalityFn() const
Get the personality function associated with this function.
uint64_t getTypeStoreSize(Type *Ty) const
Returns the maximum number of bytes that may be overwritten by storing the specified type...
A vector that has set insertion semantics.
void removeCallEdgeFor(CallSite CS)
Removes the edge in the node for the specified call site.
static CleanupReturnInst * Create(Value *CleanupPad, BasicBlock *UnwindBB=nullptr, Instruction *InsertBefore=nullptr)
AttributeSet getFnAttributes() const
The function attributes are returned.
BasicBlock * changeToInvokeAndSplitBasicBlock(CallInst *CI, BasicBlock *UnwindEdge)
Convert the CallInst to InvokeInst with the specified unwind edge basic block.
bool isEHPad() const
Return true if the instruction is a variety of EH-block.
void setPersonalityFn(Constant *Fn)
AttributeList getAttributes() const
Get the parameter attributes of the call.
static void HandleInlinedEHPad(InvokeInst *II, BasicBlock *FirstNewBlock, ClonedCodeInfo &InlinedCodeInfo)
If we inlined an invoke site, we need to convert calls in the body of the inlined function into invokes.
unsigned getNumOperands() const
Return number of MDNode operands.
bool isEmpty() const
Return true if there are no attributes.
Value * SimplifyInstruction(Instruction *I, const SimplifyQuery &Q, OptimizationRemarkEmitter *ORE=nullptr)
See if we can compute a simplified version of this instruction.
Type * getElementType() const
static AttributeList get(LLVMContext &C, ArrayRef< std::pair< unsigned, Attribute >> Attrs)
Create an AttributeList with the specified parameters in it.
iterator_range< arg_iterator > args()
A wrapper class for inspecting calls to intrinsic functions.
const BasicBlock * getParent() const
an instruction to allocate memory on the stack
bool mayReadOrWriteMemory() const
Return true if this instruction may read or write memory.