#include "llvm/Config/llvm-config.h"

#define DEBUG_TYPE "gvn"

STATISTIC(NumGVNInstr, "Number of instructions deleted");
STATISTIC(NumGVNLoad, "Number of loads deleted");
STATISTIC(NumGVNPRE, "Number of instructions PRE'd");
STATISTIC(NumGVNBlocks, "Number of blocks merged");
STATISTIC(NumGVNSimpl, "Number of instructions simplified");
STATISTIC(NumGVNEqProp, "Number of equalities propagated");
STATISTIC(NumPRELoad, "Number of loads PRE'd");

static cl::opt<uint32_t> MaxRecurseDepth("gvn-max-recurse-depth", cl::Hidden,
                                         cl::init(1000), cl::ZeroOrMore,
                                         cl::desc("Max recurse depth in GVN (default = 1000)"));

static cl::opt<uint32_t> MaxNumDeps(
    "gvn-max-num-deps", cl::Hidden, cl::init(100), cl::ZeroOrMore,
    cl::desc("Max number of dependences to attempt Load PRE (default = 100)"));
  bool commutative = false;
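// Expression equality: opcode, type, and the operand value-number list must
// all match. Opcodes ~0U and ~1U are the DenseMap empty/tombstone keys and
// compare by opcode alone.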
  if (opcode != other.opcode)
    return false;
  if (opcode == ~0U || opcode == ~1U)
    return true;
  if (type != other.type)
    return false;
    Res.Val.setPointer(V);
    Res.Val.setInt(SimpleVal);

    Res.Val.setPointer(MI);
    Res.Val.setInt(MemIntrin);

    Res.Val.setPointer(LI);
    Res.Val.setInt(LoadVal);

    Res.Val.setPointer(nullptr);
    Res.Val.setInt(UndefVal);

    assert(isSimpleValue() && "Wrong accessor");
    assert(isCoercedLoadValue() && "Wrong accessor");
    assert(isMemIntrinValue() && "Wrong accessor");

    Res.AV = std::move(AV);
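// Expression construction: operands are mapped to value numbers with
// lookupOrAdd; commutative instructions and comparisons canonicalize the
// operand order (swapping the predicate when the operands are swapped).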
    e.varargs.push_back(lookupOrAdd(*OI));

    if (e.varargs[0] > e.varargs[1])
      std::swap(e.varargs[0], e.varargs[1]);
    e.commutative = true;

  if (CmpInst *C = dyn_cast<CmpInst>(I)) {
    if (e.varargs[0] > e.varargs[1]) {
      std::swap(e.varargs[0], e.varargs[1]);
      Predicate = CmpInst::getSwappedPredicate(Predicate);
    }
    e.commutative = true;

      e.varargs.push_back(*II);

  assert((Opcode == Instruction::ICmp || Opcode == Instruction::FCmp) &&
         "Not a comparison!");
  e.varargs.push_back(lookupOrAdd(LHS));
  e.varargs.push_back(lookupOrAdd(RHS));
  if (e.varargs[0] > e.varargs[1]) {
    std::swap(e.varargs[0], e.varargs[1]);
    Predicate = CmpInst::getSwappedPredicate(Predicate);
  }
  e.opcode = (Opcode << 8) | Predicate;
  e.commutative = true;

  assert(EI && "Not an ExtractValueInst?");

      e.opcode = Instruction::Sub;
      e.opcode = Instruction::Mul;

      assert(I->getNumArgOperands() == 2 &&
             "Expect two args for recognised intrinsics.");

    e.varargs.push_back(lookupOrAdd(*OI));
    e.varargs.push_back(*II);
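// ValueTable::add records a value/number pair (PHIs are also tracked in
// NumberingPhi). lookupOrAddCall lets a call that does not write memory reuse
// the number of an identical call it depends on, as reported by MemDep.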
  valueNumbering.insert(std::make_pair(V, num));
  if (PHINode *PN = dyn_cast<PHINode>(V))
    NumberingPhi[num] = PN;

  if (AA->doesNotAccessMemory(C)) {
    uint32_t e = assignExpNewValueNum(exp).first;
    valueNumbering[C] = e;
  } else if (MD && AA->onlyReadsMemory(C)) {
    auto ValNum = assignExpNewValueNum(exp);
    valueNumbering[C] = ValNum.first;

      valueNumbering[C] = nextValueNumber;
      return nextValueNumber++;

    if (local_dep.isDef()) {

        valueNumbering[C] = nextValueNumber;
        return nextValueNumber++;

      valueNumbering[C] = nextValueNumber;
      return nextValueNumber++;

      uint32_t v = lookupOrAdd(local_cdep);
      valueNumbering[C] = v;

        MD->getNonLocalCallDependency(C);

    for (unsigned i = 0, e = deps.size(); i != e; ++i) {

      if (NonLocalDepCall && DT->properlyDominates(I->getBB(), C->getParent())) {
        cdep = NonLocalDepCall;

      valueNumbering[C] = nextValueNumber;
      return nextValueNumber++;

      valueNumbering[C] = nextValueNumber;
      return nextValueNumber++;

      valueNumbering[C] = nextValueNumber;
      return nextValueNumber++;

    valueNumbering[C] = v;

    valueNumbering[C] = nextValueNumber;
    return nextValueNumber++;
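// lookupOrAdd dispatches on the instruction opcode: most instructions are
// turned into Expressions, PHIs get a fresh number recorded in NumberingPhi,
// and anything else is conservatively given a brand-new value number.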
  if (VI != valueNumbering.end())
    return VI->second;

  if (!isa<Instruction>(V)) {
    valueNumbering[V] = nextValueNumber;
    return nextValueNumber++;
  }

    case Instruction::Call:
      return lookupOrAddCall(cast<CallInst>(I));
    case Instruction::FAdd:
    case Instruction::Sub:
    case Instruction::FSub:
    case Instruction::Mul:
    case Instruction::FMul:
    case Instruction::UDiv:
    case Instruction::SDiv:
    case Instruction::FDiv:
    case Instruction::URem:
    case Instruction::SRem:
    case Instruction::FRem:
    case Instruction::Shl:
    case Instruction::LShr:
    case Instruction::AShr:
    case Instruction::And:
    case Instruction::Or:
    case Instruction::Xor:
    case Instruction::ICmp:
    case Instruction::FCmp:
    case Instruction::Trunc:
    case Instruction::ZExt:
    case Instruction::SExt:
    case Instruction::FPToUI:
    case Instruction::FPToSI:
    case Instruction::UIToFP:
    case Instruction::SIToFP:
    case Instruction::FPTrunc:
    case Instruction::FPExt:
    case Instruction::PtrToInt:
    case Instruction::IntToPtr:
    case Instruction::BitCast:
    case Instruction::ExtractElement:
    case Instruction::InsertElement:
    case Instruction::ShuffleVector:
    case Instruction::InsertValue:
    case Instruction::GetElementPtr:
      exp = createExpr(I);
      break;
    case Instruction::ExtractValue:
      exp = createExtractvalueExpr(cast<ExtractValueInst>(I));
      break;
    case Instruction::PHI:
      valueNumbering[V] = nextValueNumber;
      NumberingPhi[nextValueNumber] = cast<PHINode>(V);
      return nextValueNumber++;
    default:
      valueNumbering[V] = nextValueNumber;
      return nextValueNumber++;
  }

  uint32_t e = assignExpNewValueNum(exp).first;
  valueNumbering[V] = e;
  return e;

  assert(VI != valueNumbering.end() && "Value not numbered?");

  return (VI != valueNumbering.end()) ? VI->second : 0;

  return assignExpNewValueNum(exp).first;
  valueNumbering.clear();
  expressionNumbering.clear();
  NumberingPhi.clear();
  PhiTranslateTable.clear();

  uint32_t Num = valueNumbering.lookup(V);
  valueNumbering.erase(V);
  NumberingPhi.erase(Num);

  for (DenseMap<Value*, uint32_t>::const_iterator
         I = valueNumbering.begin(), E = valueNumbering.end(); I != E; ++I) {
    assert(I->first != V && "Inst still occurs in value numbering map!");
  }

  bool Changed = runImpl(F, AC, DT, TLI, AA, &MemDep, LI, &ORE);

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  for (DenseMap<uint32_t, Value*>::iterator I = d.begin(),
         E = d.end(); I != E; ++I) {
    errs() << I->first << "\n";
  std::pair<DenseMap<BasicBlock*, char>::iterator, bool> IV =
      FullyAvailableBlocks.insert(std::make_pair(BB, 2));

  if (IV.first->second == 2)
    IV.first->second = 3;
  return IV.first->second != 0;

    goto SpeculationFailure;

  for (; PI != PE; ++PI)

      goto SpeculationFailure;

  char &BBVal = FullyAvailableBlocks[BB];

    char &EntryVal = FullyAvailableBlocks[Entry];
    if (EntryVal == 0) continue;

  } while (!BBWorklist.empty());
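// Materializing an available value may require coercion (via
// getStoreValueForLoad / getLoadValueForLoad / getMemInstValueForLoad) when
// the available value's type does not match the type being loaded.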
  if (ValuesPerBlock.size() == 1 &&

    assert(!ValuesPerBlock[0].AV.isUndefValue() &&
           "Dead BB dominate this block");
    return ValuesPerBlock[0].MaterializeAdjustedValue(LI, gvn);

        ((AV.AV.isSimpleValue() && AV.AV.getSimpleValue() == LI) ||
         (AV.AV.isCoercedLoadValue() && AV.AV.getCoercedLoadValue() == LI)))

  if (isSimpleValue()) {
    Res = getSimpleValue();
    if (Res->getType() != LoadTy) {
                        << " " << *getSimpleValue() << '\n'
  } else if (isCoercedLoadValue()) {
      LLVM_DEBUG(dbgs() << "GVN COERCED NONLOCAL LOAD:\nOffset: " << Offset
                        << " " << *getCoercedLoadValue() << '\n'
  } else if (isMemIntrinValue()) {
                        << " " << *getMemIntrinValue() << '\n'
    assert(isUndefValue() && "Should be UndefVal");

  assert(Res && "failed to materialize?");
  User *OtherAccess = nullptr;

  R << "load of type " << NV("Type", LI->getType()) << " not eliminated"

    if (U != LI && (isa<LoadInst>(U) || isa<StoreInst>(U)) &&
        DT->dominates(cast<Instruction>(U), LI)) {

      OtherAccess = nullptr;

    R << " in favor of " << NV("OtherAccess", OtherAccess);

  R << " because it is clobbered by " << NV("ClobberedBy", DepInfo.getInst());

         "expected a local dependence");

    if (Address && LI->isAtomic() <= DepSI->isAtomic()) {

    if (DepLI != LI && Address && LI->isAtomic() <= DepLI->isAtomic()) {

      dbgs() << " is clobbered by " << *I << '\n';);

  if (StoreInst *S = dyn_cast<StoreInst>(DepInst)) {
    if (S->getValueOperand()->getType() != LI->getType() &&

  if (LoadInst *LD = dyn_cast<LoadInst>(DepInst)) {

      dbgs() << " has unknown def " << *DepInst << '\n';);
void GVN::AnalyzeLoadAvailability(LoadInst *LI, LoadDepVect &Deps,
                                  AvailValInBlkVect &ValuesPerBlock,
                                  UnavailBlkVect &UnavailableBlocks) {
  unsigned NumDeps = Deps.size();
  for (unsigned i = 0, e = NumDeps; i != e; ++i) {

    if (DeadBlocks.count(DepBB)) {

      UnavailableBlocks.push_back(DepBB);

    Value *Address = Deps[i].getAddress();

    if (AnalyzeLoadAvailability(LI, DepInfo, Address, AV)) {

      UnavailableBlocks.push_back(DepBB);

  assert(NumDeps == ValuesPerBlock.size() + UnavailableBlocks.size() &&
         "post condition violation");

bool GVN::PerformLoadPRE(LoadInst *LI, AvailValInBlkVect &ValuesPerBlock,
                         UnavailBlkVect &UnavailableBlocks) {

                         UnavailableBlocks.end());
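// Load PRE proper: the load may only be inserted when exactly one predecessor
// lacks the value; critical edges into LoadBB are split first, the load is
// placed in that predecessor, and a PHI merges the available values.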
  if (!IsSafeToSpeculativelyExecute && ICF->isDominatedByICFIFromSameBlock(LI))

    if (TmpBB == LoadBB)

    if (Blockers.count(TmpBB))

    if (!IsSafeToSpeculativelyExecute && ICF->hasICF(TmpBB))

    FullyAvailableBlocks[AV.BB] = true;
  for (BasicBlock *UnavailableBB : UnavailableBlocks)
    FullyAvailableBlocks[UnavailableBB] = false;

    if (Pred->getTerminator()->isEHPad()) {
      LLVM_DEBUG(
          dbgs() << "COULD NOT PRE LOAD BECAUSE OF AN EH PAD PREDECESSOR '"
                 << Pred->getName() << "': " << *LI << '\n');

    if (Pred->getTerminator()->getNumSuccessors() != 1) {
      if (isa<IndirectBrInst>(Pred->getTerminator())) {
        LLVM_DEBUG(
            dbgs() << "COULD NOT PRE LOAD BECAUSE OF INDBR CRITICAL EDGE '"
                   << Pred->getName() << "': " << *LI << '\n');

      if (LoadBB->isEHPad()) {
        LLVM_DEBUG(
            dbgs() << "COULD NOT PRE LOAD BECAUSE OF AN EH PAD CRITICAL EDGE '"
                   << Pred->getName() << "': " << *LI << '\n');

      PredLoads[Pred] = nullptr;

  unsigned NumUnavailablePreds = PredLoads.size() + CriticalEdgePred.size();
  assert(NumUnavailablePreds != 0 &&
         "Fully available value should already be eliminated!");

  if (NumUnavailablePreds != 1)

  for (BasicBlock *OrigPred : CriticalEdgePred) {
    BasicBlock *NewPred = splitCriticalEdges(OrigPred, LoadBB);
    assert(!PredLoads.count(OrigPred) && "Split edges shouldn't be in map!");
    PredLoads[NewPred] = nullptr;
    LLVM_DEBUG(dbgs() << "Split critical edge " << OrigPred->getName() << "->"
                      << LoadBB->getName() << '\n');
  }

  bool CanDoPRE = true;

  for (auto &PredLoad : PredLoads) {
    BasicBlock *UnavailablePred = PredLoad.first;

    Value *LoadPtr = nullptr;
    LoadPtr = Address.PHITranslateWithInsertion(LoadBB, UnavailablePred,

    PredLoad.second = LoadPtr;

    while (!NewInsts.empty()) {

      markInstructionForDeletion(I);

    return !CriticalEdgePred.empty();

                    << "INSERTED " << NewInsts.size() << " INSTS: " << *NewInsts.back()
  for (const auto &PredLoad : PredLoads) {
    BasicBlock *UnavailablePred = PredLoad.first;
    Value *LoadPtr = PredLoad.second;

      NewLoad->setAAMetadata(Tags);

      MD->invalidateCachedPointerInfo(LoadPtr);

  if (isa<PHINode>(V))

    MD->invalidateCachedPointerInfo(V);
  markInstructionForDeletion(LI);

           << "load eliminated by PRE";

  using namespace ore;

             << "load of type " << NV("Type", LI->getType()) << " eliminated"
             << NV("InfavorOfValue", AvailableValue);
bool GVN::processNonLocalLoad(LoadInst *LI) {

  MD->getNonLocalPointerDependency(LI, Deps);

  unsigned NumDeps = Deps.size();

      !Deps[0].getResult().isDef() && !Deps[0].getResult().isClobber()) {
    dbgs() << " has unknown dependencies\n";);

         OE = GEP->idx_end();
      if (Instruction *I = dyn_cast<Instruction>(OI->get()))
        performScalarPRE(I);

  AvailValInBlkVect ValuesPerBlock;
  UnavailBlkVect UnavailableBlocks;
  AnalyzeLoadAvailability(LI, Deps, ValuesPerBlock, UnavailableBlocks);

  if (ValuesPerBlock.empty())

  if (UnavailableBlocks.empty()) {
    LLVM_DEBUG(dbgs() << "GVN REMOVING NONLOCAL LOAD: " << *LI << '\n');

    if (isa<PHINode>(V))

      MD->invalidateCachedPointerInfo(V);
    markInstructionForDeletion(LI);

  return PerformLoadPRE(LI, ValuesPerBlock, UnavailableBlocks);
bool GVN::processAssumeIntrinsic(IntrinsicInst *IntrinsicI) {
  assert(IntrinsicI->getIntrinsicID() == Intrinsic::assume &&
         "This function can only be called with llvm.assume intrinsic");

  if (ConstantInt *Cond = dyn_cast<ConstantInt>(V)) {
    if (Cond->isZero()) {

    markInstructionForDeletion(IntrinsicI);
  } else if (isa<Constant>(V)) {

  bool Changed = false;

    Changed |= propagateEquality(V, True, Edge, false);

  ReplaceWithConstMap[V] = True;

  if (auto *CmpI = dyn_cast<CmpInst>(V)) {
    if (CmpI->getPredicate() == CmpInst::Predicate::ICMP_EQ ||
        CmpI->getPredicate() == CmpInst::Predicate::FCMP_OEQ ||
        (CmpI->getPredicate() == CmpInst::Predicate::FCMP_UEQ &&
         CmpI->getFastMathFlags().noNaNs())) {
      Value *CmpLHS = CmpI->getOperand(0);
      Value *CmpRHS = CmpI->getOperand(1);
      if (isa<Constant>(CmpLHS))
        std::swap(CmpLHS, CmpRHS);

      if (RHSConst != nullptr && !isa<Constant>(CmpLHS))
        ReplaceWithConstMap[CmpLHS] = RHSConst;
bool GVN::processLoad(LoadInst *L) {

    markInstructionForDeletion(L);

    return processNonLocalLoad(L);

      dbgs() << " has unknown dependence\n";);

    markInstructionForDeletion(L);

      MD->invalidateCachedPointerInfo(AvailableValue);

std::pair<uint32_t, bool>
GVN::ValueTable::assignExpNewValueNum(Expression &Exp) {
  uint32_t &e = expressionNumbering[Exp];
  bool CreateNewValNum = !e;
  if (CreateNewValNum) {
    Expressions.push_back(Exp);
    if (ExprIdx.size() < nextValueNumber + 1)
      ExprIdx.resize(nextValueNumber * 2);
    e = nextValueNumber;
    ExprIdx[nextValueNumber++] = nextExprNumber++;
  }
  return {e, CreateNewValNum};
}
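// phiTranslate maps a value number across a CFG edge (Pred -> PhiBlock) so a
// leader found in the predecessor can match an expression built in terms of
// PHIs; translations are cached in PhiTranslateTable and invalidated by
// eraseTranslateCacheEntry.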
  LeaderTableEntry *Vals = &Gvn.LeaderTable[Num];
  while (Vals && Vals->BB == BB)

  auto FindRes = PhiTranslateTable.find({Num, Pred});
  if (FindRes != PhiTranslateTable.end())
    return FindRes->second;
  uint32_t NewNum = phiTranslateImpl(Pred, PhiBlock, Num, Gvn);
  PhiTranslateTable.insert({{Num, Pred}, NewNum});

  if (PHINode *PN = NumberingPhi[Num]) {
    for (unsigned i = 0; i != PN->getNumIncomingValues(); ++i) {
      if (PN->getParent() == PhiBlock && PN->getIncomingBlock(i) == Pred)

  if (!areAllValsInBB(Num, PhiBlock, Gvn))

  if (Num >= ExprIdx.size() || ExprIdx[Num] == 0)

  for (unsigned i = 0; i < Exp.varargs.size(); i++) {

    if ((i > 1 && Exp.opcode == Instruction::InsertValue) ||
        (i > 0 && Exp.opcode == Instruction::ExtractValue))

    Exp.varargs[i] = phiTranslate(Pred, PhiBlock, Exp.varargs[i], Gvn);

      if (Opcode == Instruction::ICmp || Opcode == Instruction::FCmp)
        Exp.opcode = (Opcode << 8) |
                     CmpInst::getSwappedPredicate(
                         static_cast<CmpInst::Predicate>(Exp.opcode & 255));

  if (uint32_t NewNum = expressionNumbering[Exp])

  auto FindRes = PhiTranslateTable.find({Num, Pred});
  if (FindRes != PhiTranslateTable.end())
    PhiTranslateTable.erase(FindRes);
  LeaderTableEntry Vals = LeaderTable[num];
  if (!Vals.Val) return nullptr;

  Value *Val = nullptr;
  if (DT->dominates(Vals.BB, BB)) {

    if (isa<Constant>(Val)) return Val;

  LeaderTableEntry *Next = Vals.Next;

    if (DT->dominates(Next->BB, BB)) {
      if (isa<Constant>(Next->Val)) return Next->Val;
      if (!Val) Val = Next->Val;

         "No edge between these basic blocks!");
  return Pred != nullptr;

void GVN::assignBlockRPONumber(Function &F) {
  BlockRPONumber.clear();

    BlockRPONumber[BB] = NextBlockNumber++;
  InvalidBlockRPONumbers = false;
bool GVN::replaceOperandsWithConsts(Instruction *Instr) const {
  bool Changed = false;
  for (unsigned OpNum = 0; OpNum < Instr->getNumOperands(); ++OpNum) {

    auto it = ReplaceWithConstMap.find(Operand);
    if (it != ReplaceWithConstMap.end()) {
      assert(!isa<Constant>(Operand) &&
             "Replacing constants with constants is invalid");
                        << *it->second << " in instruction " << *Instr << '\n');
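// propagateEquality: given that LHS == RHS holds on every path through Root,
// replace dominated uses of LHS with RHS, record leaders for the value number,
// and derive further equalities (for example from comparisons known to be true).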
                             bool DominatesByEdge) {
  Worklist.push_back(std::make_pair(LHS, RHS));
  bool Changed = false;

  while (!Worklist.empty()) {
    std::pair<Value*, Value*> Item = Worklist.pop_back_val();
    LHS = Item.first; RHS = Item.second;

    if (isa<Constant>(LHS) && isa<Constant>(RHS))
      continue;

    if (isa<Constant>(LHS) || (isa<Argument>(LHS) && !isa<Constant>(RHS)))
      std::swap(LHS, RHS);
    assert((isa<Argument>(LHS) || isa<Instruction>(LHS)) && "Unexpected value!");

    uint32_t LVN = VN.lookupOrAdd(LHS);
    if ((isa<Argument>(LHS) && isa<Argument>(RHS)) ||
        (isa<Instruction>(LHS) && isa<Instruction>(RHS))) {

      uint32_t RVN = VN.lookupOrAdd(RHS);

    if (RootDominatesEnd && !isa<Instruction>(RHS))
      addToLeaderTable(LVN, RHS, Root.getEnd());

      unsigned NumReplacements =

      Changed |= NumReplacements > 0;
      NumGVNEqProp += NumReplacements;

        MD->invalidateCachedPointerInfo(LHS);

    bool isKnownFalse = !isKnownTrue;

        Worklist.push_back(std::make_pair(A, RHS));
        Worklist.push_back(std::make_pair(B, RHS));

    if (CmpInst *Cmp = dyn_cast<CmpInst>(LHS)) {
      Value *Op0 = Cmp->getOperand(0), *Op1 = Cmp->getOperand(1);

        Worklist.push_back(std::make_pair(Op0, Op1));

        if (isa<ConstantFP>(Op1) && !cast<ConstantFP>(Op1)->isZero())
          Worklist.push_back(std::make_pair(Op0, Op1));

      uint32_t NextNum = VN.getNextUnusedValueNumber();
      uint32_t Num = VN.lookupOrAddCmp(Cmp->getOpcode(), NotPred, Op0, Op1);

      if (Num < NextNum) {

        if (NotCmp && isa<Instruction>(NotCmp)) {
          unsigned NumReplacements =

          Changed |= NumReplacements > 0;
          NumGVNEqProp += NumReplacements;

            MD->invalidateCachedPointerInfo(NotCmp);

      if (RootDominatesEnd)
        addToLeaderTable(Num, NotVal, Root.getEnd());
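// processInstruction: try instruction simplification, give loads, assumes,
// branches, and switches special handling, then value-number the instruction
// and either record it as a leader or replace it with an existing leader.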
  if (isa<DbgInfoIntrinsic>(I))

  bool Changed = false;

    markInstructionForDeletion(I);

    if (MD && V->getType()->isPtrOrPtrVectorTy())
      MD->invalidateCachedPointerInfo(V);

    return processAssumeIntrinsic(IntrinsicI);

  if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
    if (processLoad(LI))

    unsigned Num = VN.lookupOrAdd(LI);
    addToLeaderTable(Num, LI, LI->getParent());

  if (BranchInst *BI = dyn_cast<BranchInst>(I)) {
    if (!BI->isConditional())

    if (isa<Constant>(BI->getCondition()))
      return processFoldableCondBr(BI);

    Value *BranchCond = BI->getCondition();

    if (TrueSucc == FalseSucc)

    bool Changed = false;

    Changed |= propagateEquality(BranchCond, TrueVal, TrueE, true);

    Changed |= propagateEquality(BranchCond, FalseVal, FalseE, true);

    Value *SwitchCond = SI->getCondition();

    bool Changed = false;

    for (unsigned i = 0, n = SI->getNumSuccessors(); i != n; ++i)
      ++SwitchEdges[SI->getSuccessor(i)];

      if (SwitchEdges.lookup(Dst) == 1) {

        Changed |= propagateEquality(SwitchCond, i->getCaseValue(), E, true);

  uint32_t NextNum = VN.getNextUnusedValueNumber();
  unsigned Num = VN.lookupOrAdd(I);

  if (isa<AllocaInst>(I) || I->isTerminator() || isa<PHINode>(I)) {
    addToLeaderTable(Num, I, I->getParent());

  if (Num >= NextNum) {
    addToLeaderTable(Num, I, I->getParent());

    addToLeaderTable(Num, I, I->getParent());
  } else if (Repl == I) {

    MD->invalidateCachedPointerInfo(Repl);
  markInstructionForDeletion(I);
  VN.setAliasAnalysis(&RunAA);

  InvalidBlockRPONumbers = true;

  bool Changed = false;
  bool ShouldContinue = true;

    Changed |= removedBlock;

  unsigned Iteration = 0;
  while (ShouldContinue) {
    ShouldContinue = iterateOnFunction(F);
    Changed |= ShouldContinue;

    assignValNumForDeadCode();
    bool PREChanged = true;
    while (PREChanged) {
      PREChanged = performPRE(F);
      Changed |= PREChanged;

  cleanupGlobalSets();

  assert(InstrsToErase.empty() &&
         "We expect InstrsToErase to be empty across iterations");
  if (DeadBlocks.count(BB))

  ReplaceWithConstMap.clear();
  bool ChangedFunction = false;

    if (!ReplaceWithConstMap.empty())
      ChangedFunction |= replaceOperandsWithConsts(&*BI);
    ChangedFunction |= processInstruction(&*BI);

    if (InstrsToErase.empty()) {

    NumGVNInstr += InstrsToErase.size();

    bool AtStart = BI == BB->begin();

    for (auto *I : InstrsToErase) {
      assert(I->getParent() == BB && "Removing instruction from wrong block?");

      if (MD) MD->removeInstruction(I);

      ICF->removeInstruction(I);

    InstrsToErase.clear();

  return ChangedFunction;
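// Scalar PRE: if an expression is available in every predecessor but one, a
// clone is hoisted into the missing predecessor (or the critical edge is
// queued for splitting) and a ".pre-phi" PHI joins the values.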
    if (isa<Argument>(Op) || isa<Constant>(Op) || isa<GlobalValue>(Op))

    if (!VN.exists(Op)) {

        VN.phiTranslate(Pred, Curr, VN.lookup(Op), *this);
    if (Value *V = findLeader(Pred, TValNo)) {

  unsigned Num = VN.lookupOrAdd(Instr);

  addToLeaderTable(Num, Instr, Pred);

bool GVN::performScalarPRE(Instruction *CurInst) {
  if (isa<AllocaInst>(CurInst) || CurInst->isTerminator() ||
      isa<DbgInfoIntrinsic>(CurInst))

  if (isa<CmpInst>(CurInst))

  if (isa<GetElementPtrInst>(CurInst))

  if (CallInst *CallI = dyn_cast<CallInst>(CurInst))
    if (CallI->isInlineAsm())

  uint32_t ValNo = VN.lookup(CurInst);

  unsigned NumWith = 0;
  unsigned NumWithout = 0;

  if (InvalidBlockRPONumbers)
    assignBlockRPONumber(*CurrentBlock->getParent());

    if (!DT->isReachableFromEntry(P)) {

    assert(BlockRPONumber.count(P) && BlockRPONumber.count(CurrentBlock) &&
           "Invalid BlockRPONumber map.");
    if (BlockRPONumber[P] >= BlockRPONumber[CurrentBlock] &&

          if (auto *Inst = dyn_cast<Instruction>(U.get()))
            return Inst->getParent() == CurrentBlock;

    uint32_t TValNo = VN.phiTranslate(P, CurrentBlock, ValNo, *this);
    Value *predV = findLeader(P, TValNo);

      predMap.push_back(std::make_pair(static_cast<Value *>(nullptr), P));

    } else if (predV == CurInst) {

      predMap.push_back(std::make_pair(predV, P));

  if (NumWithout > 1 || NumWith == 0)

  if (NumWithout != 0) {

    if (ICF->isDominatedByICFIFromSameBlock(CurInst))

    toSplit.push_back(std::make_pair(PREPred->getTerminator(), SuccNum));

    PREInstr = CurInst->clone();
    if (!performScalarPREInsertion(PREInstr, PREPred, CurrentBlock, ValNo)) {

  assert(PREInstr != nullptr || NumWithout == 0);

                              CurInst->getName() + ".pre-phi",
                              &CurrentBlock->front());
  for (unsigned i = 0, e = predMap.size(); i != e; ++i) {

  VN.eraseTranslateCacheEntry(ValNo, *CurrentBlock);
  addToLeaderTable(ValNo, Phi, CurrentBlock);

    MD->invalidateCachedPointerInfo(Phi);

  removeFromLeaderTable(ValNo, CurInst, CurrentBlock);

    MD->removeInstruction(CurInst);

  ICF->removeInstruction(CurInst);
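// Edge splitting requested during PRE is deferred to splitCriticalEdges(),
// which also invalidates cached predecessor info and the block RPO numbers.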
bool GVN::performPRE(Function &F) {
  bool Changed = false;

    if (CurrentBlock->isEHPad())

         BE = CurrentBlock->end();

      Changed |= performScalarPRE(CurInst);

  if (splitCriticalEdges())

    MD->invalidateCachedPredecessors();
  InvalidBlockRPONumbers = true;

bool GVN::splitCriticalEdges() {
  if (toSplit.empty())

    std::pair<Instruction *, unsigned> Edge = toSplit.pop_back_val();

  } while (!toSplit.empty());
  if (MD) MD->invalidateCachedPredecessors();
  InvalidBlockRPONumbers = true;

bool GVN::iterateOnFunction(Function &F) {
  cleanupGlobalSets();

  bool Changed = false;

    Changed |= processBlock(BB);

void GVN::cleanupGlobalSets() {

  LeaderTable.clear();
  BlockRPONumber.clear();
  TableAllocator.Reset();

  InvalidBlockRPONumbers = true;

void GVN::verifyRemoved(const Instruction *Inst) const {
  VN.verifyRemoved(Inst);

  for (DenseMap<uint32_t, LeaderTableEntry>::const_iterator
         I = LeaderTable.begin(), E = LeaderTable.end(); I != E; ++I) {
    const LeaderTableEntry *Node = &I->second;
    assert(Node->Val != Inst && "Inst still in value numbering scope!");

    while (Node->Next) {
      assert(Node->Val != Inst && "Inst still in value numbering scope!");
  while (!NewDead.empty()) {

    if (DeadBlocks.count(D))

    DT->getDescendants(D, Dom);

      if (DeadBlocks.count(S))

      bool AllPredDead = true;

        if (!DeadBlocks.count(P)) {
          AllPredDead = false;

    if (DeadBlocks.count(B))

      if (!DeadBlocks.count(P))

      DeadBlocks.insert(P = S);

      PHINode &Phi = cast<PHINode>(*II);

        MD->invalidateCachedPointerInfo(&Phi);

bool GVN::processFoldableCondBr(BranchInst *BI) {

  if (DeadBlocks.count(DeadRoot))

    DeadRoot = splitCriticalEdges(BI->getParent(), DeadRoot);

  addDeadBlock(DeadRoot);

void GVN::assignValNumForDeadCode() {

      unsigned ValNum = VN.lookupOrAdd(&Inst);
      addToLeaderTable(ValNum, &Inst, BB);
      : FunctionPass(ID), NoMemDepAnalysis(NoMemDepAnalysis) {

    if (skipFunction(F))
      return false;

    auto *LIWP = getAnalysisIfAvailable<LoopInfoWrapperPass>();

    return Impl.runImpl(
        F, getAnalysis<AssumptionCacheTracker>().getAssumptionCache(F),
        getAnalysis<DominatorTreeWrapperPass>().getDomTree(),
        getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(),
        getAnalysis<AAResultsWrapperPass>().getAAResults(),
        NoMemDepAnalysis
            ? nullptr
            : &getAnalysis<MemoryDependenceWrapperPass>().getMemDep(),
        LIWP ? &LIWP->getLoopInfo() : nullptr,
        &getAnalysis<OptimizationRemarkEmitterWrapperPass>().getORE());

    if (!NoMemDepAnalysis)

  bool NoMemDepAnalysis;