#define DEBUG_TYPE "infer-address-spaces"

using namespace llvm;

// Sentinel meaning "no address space has been inferred for this value yet".
static const unsigned UninitializedAddressSpace =
    std::numeric_limits<unsigned>::max();

namespace {

using ValueToAddrSpaceMapTy = DenseMap<const Value *, unsigned>;

/// InferAddressSpaces pass (excerpt).
class InferAddressSpaces : public FunctionPass {
  // The flat (generic) address space of the target, obtained from
  // TargetTransformInfo in runOnFunction.
  unsigned FlatAddrSpace;
  // Returns the new address space of V if updated; otherwise, returns None.
  Optional<unsigned>
  updateAddressSpace(const Value &V,
                     const ValueToAddrSpaceMapTy &InferredAddrSpace) const;

  // Tries to infer a specific address space for each expression in Postorder.
  void inferAddressSpaces(ArrayRef<WeakTrackingVH> Postorder,
                          ValueToAddrSpaceMapTy *InferredAddrSpace) const;

  bool isSafeToCastConstAddrSpace(Constant *C, unsigned NewAS) const;

  // Changes the flat address expressions in F to point to specific address
  // spaces if InferredAddrSpace says so.
  bool rewriteWithNewAddressSpaces(
      const TargetTransformInfo &TTI, ArrayRef<WeakTrackingVH> Postorder,
      const ValueToAddrSpaceMapTy &InferredAddrSpace, Function *F) const;

  void appendsFlatAddressExpressionToPostorderStack(
      Value *V, std::vector<std::pair<Value *, bool>> &PostorderStack,
      DenseSet<Value *> &Visited) const;

  bool rewriteIntrinsicOperands(IntrinsicInst *II, Value *OldV,
                                Value *NewV) const;
  void collectRewritableIntrinsicOperands(
      IntrinsicInst *II, std::vector<std::pair<Value *, bool>> &PostorderStack,
      DenseSet<Value *> &Visited) const;

  std::vector<WeakTrackingVH> collectFlatAddressExpressions(Function &F) const;

  Value *cloneValueWithNewAddressSpace(
      Value *V, unsigned NewAddrSpace,
      const ValueToValueMapTy &ValueWithNewAddrSpace,
      SmallVectorImpl<const Use *> *UndefUsesToFix) const;

  unsigned joinAddressSpaces(unsigned AS1, unsigned AS2) const;
};

} // end anonymous namespace
// Returns true if V is an address expression.
// TODO: Currently, we only consider phi, bitcast, addrspacecast, and
// getelementptr operators.
static bool isAddressExpression(const Value &V) {
  if (!isa<Operator>(V))
    return false;

  switch (cast<Operator>(V).getOpcode()) {
  case Instruction::PHI:
  case Instruction::BitCast:
  case Instruction::AddrSpaceCast:
  case Instruction::GetElementPtr:
    return true;
  default:
    return false;
  }
}

// Returns the pointer operands of V.
static SmallVector<Value *, 2> getPointerOperands(const Value &V) {
  const Operator &Op = cast<Operator>(V);
  switch (Op.getOpcode()) {
  case Instruction::PHI: {
    auto IncomingValues = cast<PHINode>(Op).incoming_values();
    return SmallVector<Value *, 2>(IncomingValues.begin(),
                                   IncomingValues.end());
  }
  case Instruction::BitCast:
  case Instruction::AddrSpaceCast:
  case Instruction::GetElementPtr:
    return {Op.getOperand(0)};
  default:
    llvm_unreachable("Unexpected instruction type.");
  }
}
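// Illustrative note (not part of the original source): with a flat address
// space of 0 and a specific space 3 (numbers chosen only for this example),
// %p and %q below are address expressions in the sense of the helpers above,
// while the load is merely a consumer of one:
//
//   %p = addrspacecast float addrspace(3)* %lds to float*
//   %q = getelementptr float, float* %p, i64 4
//   %v = load float, float* %q
//
// isAddressExpression() is true for %p and %q; getPointerOperands(%q) yields
// {%p}, and getPointerOperands(%p) yields {%lds}.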
bool InferAddressSpaces::rewriteIntrinsicOperands(IntrinsicInst *II,
                                                  Value *OldV,
                                                  Value *NewV) const {
  // ...
    // Bail out unless the intrinsic's volatile operand is a constant zero;
    // only non-volatile forms are rewritten.
    if (!IsVolatile || !IsVolatile->isZero())
      return false;
  // ...
}
void InferAddressSpaces::collectRewritableIntrinsicOperands(
    IntrinsicInst *II, std::vector<std::pair<Value *, bool>> &PostorderStack,
    DenseSet<Value *> &Visited) const {
  // ...
    // The pointer argument of a rewritable intrinsic is itself a flat address
    // expression worth visiting.
    appendsFlatAddressExpressionToPostorderStack(II->getArgOperand(0),
                                                 PostorderStack, Visited);
  // ...
}
void InferAddressSpaces::appendsFlatAddressExpressionToPostorderStack(
    Value *V, std::vector<std::pair<Value *, bool>> &PostorderStack,
    DenseSet<Value *> &Visited) const {
  assert(V->getType()->isPointerTy());

  // Generic addressing expressions may be hidden in nested constant
  // expressions.
  if (ConstantExpr *CE = dyn_cast<ConstantExpr>(V)) {
    if (isAddressExpression(*CE) && Visited.insert(CE).second)
      PostorderStack.push_back(std::make_pair(CE, false));
    return;
  }

  if (isAddressExpression(*V) &&
      V->getType()->getPointerAddressSpace() == FlatAddrSpace) {
    if (Visited.insert(V).second) {
      PostorderStack.push_back(std::make_pair(V, false));

      // Constant-expression operands are address expressions too; queue any
      // that have not been visited yet.
      Operator *Op = cast<Operator>(V);
      for (unsigned I = 0, E = Op->getNumOperands(); I != E; ++I) {
        if (ConstantExpr *CE = dyn_cast<ConstantExpr>(Op->getOperand(I))) {
          if (isAddressExpression(*CE) && Visited.insert(CE).second)
            PostorderStack.emplace_back(CE, false);
        }
      }
    }
  }
}
std::vector<WeakTrackingVH>
InferAddressSpaces::collectFlatAddressExpressions(Function &F) const {
  // This function implements a non-recursive postorder traversal of a partial
  // use-def graph of function F.
  std::vector<std::pair<Value *, bool>> PostorderStack;
  // The set of visited expressions.
  DenseSet<Value *> Visited;

  auto PushPtrOperand = [&](Value *Ptr) {
    appendsFlatAddressExpressionToPostorderStack(Ptr, PostorderStack, Visited);
  };

  for (Instruction &I : instructions(F)) {
    if (auto *GEP = dyn_cast<GetElementPtrInst>(&I)) {
      if (!GEP->getType()->isVectorTy())
        PushPtrOperand(GEP->getPointerOperand());
    } else if (auto *LI = dyn_cast<LoadInst>(&I))
      PushPtrOperand(LI->getPointerOperand());
    else if (auto *SI = dyn_cast<StoreInst>(&I))
      PushPtrOperand(SI->getPointerOperand());
    else if (auto *RMW = dyn_cast<AtomicRMWInst>(&I))
      PushPtrOperand(RMW->getPointerOperand());
    else if (auto *CmpX = dyn_cast<AtomicCmpXchgInst>(&I))
      PushPtrOperand(CmpX->getPointerOperand());
    else if (auto *MI = dyn_cast<MemIntrinsic>(&I)) {
      // For memset/memcpy/memmove, any pointer operand can be replaced.
      PushPtrOperand(MI->getRawDest());

      // Handle the second operand for memcpy/memmove.
      if (auto *MTI = dyn_cast<MemTransferInst>(MI))
        PushPtrOperand(MTI->getRawSource());
    } else if (auto *II = dyn_cast<IntrinsicInst>(&I))
      collectRewritableIntrinsicOperands(II, PostorderStack, Visited);
    else if (ICmpInst *Cmp = dyn_cast<ICmpInst>(&I)) {
      // FIXME: Handle vectors of pointers.
      if (Cmp->getOperand(0)->getType()->isPointerTy()) {
        PushPtrOperand(Cmp->getOperand(0));
        PushPtrOperand(Cmp->getOperand(1));
      }
    } else if (auto *ASC = dyn_cast<AddrSpaceCastInst>(&I)) {
      if (!ASC->getType()->isVectorTy())
        PushPtrOperand(ASC->getPointerOperand());
    }
  }

  std::vector<WeakTrackingVH> Postorder; // The resultant postorder.
  while (!PostorderStack.empty()) {
    Value *TopVal = PostorderStack.back().first;
    // If the operands of the expression on the top are already explored,
    // add that expression to the resultant postorder.
    if (PostorderStack.back().second) {
      Postorder.push_back(TopVal);
      PostorderStack.pop_back();
      continue;
    }
    // Otherwise, add its operands to the stack and explore them.
    PostorderStack.back().second = true;
    for (Value *PtrOperand : getPointerOperands(*TopVal)) {
      appendsFlatAddressExpressionToPostorderStack(PtrOperand, PostorderStack,
                                                   Visited);
    }
  }
  return Postorder;
}
static Value *operandWithNewAddressSpaceOrCreateUndef(
    const Use &OperandUse, unsigned NewAddrSpace,
    const ValueToValueMapTy &ValueWithNewAddrSpace,
    SmallVectorImpl<const Use *> *UndefUsesToFix) {
  Value *Operand = OperandUse.get();

  Type *NewPtrTy =
      Operand->getType()->getPointerElementType()->getPointerTo(NewAddrSpace);

  if (Constant *C = dyn_cast<Constant>(Operand))
    return ConstantExpr::getAddrSpaceCast(C, NewPtrTy);

  if (Value *NewOperand = ValueWithNewAddrSpace.lookup(Operand))
    return NewOperand;

  // The operand has not been cloned yet; use an undef placeholder and record
  // the use so it can be fixed up afterwards.
  UndefUsesToFix->push_back(&OperandUse);
  return UndefValue::get(NewPtrTy);
}
static Value *cloneInstructionWithNewAddressSpace(
    Instruction *I, unsigned NewAddrSpace,
    const ValueToValueMapTy &ValueWithNewAddrSpace,
    SmallVectorImpl<const Use *> *UndefUsesToFix) {
  Type *NewPtrType =
      I->getType()->getPointerElementType()->getPointerTo(NewAddrSpace);

  if (I->getOpcode() == Instruction::AddrSpaceCast) {
    Value *Src = I->getOperand(0);
    // Because I is flat, the source address space must be specific; the
    // inferred address space is therefore the source space.
    assert(Src->getType()->getPointerAddressSpace() == NewAddrSpace);
    if (Src->getType() != NewPtrType)
      return new BitCastInst(Src, NewPtrType);
    return Src;
  }

  // Compute the converted pointer operands.
  SmallVector<Value *, 4> NewPointerOperands;
  for (const Use &OperandUse : I->operands()) {
    if (!OperandUse.get()->getType()->isPointerTy())
      NewPointerOperands.push_back(nullptr);
    else
      NewPointerOperands.push_back(operandWithNewAddressSpaceOrCreateUndef(
          OperandUse, NewAddrSpace, ValueWithNewAddrSpace, UndefUsesToFix));
  }

  switch (I->getOpcode()) {
  case Instruction::BitCast:
    return new BitCastInst(NewPointerOperands[0], NewPtrType);
  case Instruction::PHI: {
    PHINode *PHI = cast<PHINode>(I);
    PHINode *NewPHI = PHINode::Create(NewPtrType, PHI->getNumIncomingValues());
    for (unsigned Index = 0; Index < PHI->getNumIncomingValues(); ++Index) {
      unsigned OperandNo = PHINode::getOperandNumForIncomingValue(Index);
      NewPHI->addIncoming(NewPointerOperands[OperandNo],
                          PHI->getIncomingBlock(Index));
    }
    return NewPHI;
  }
  case Instruction::GetElementPtr: {
    GetElementPtrInst *GEP = cast<GetElementPtrInst>(I);
    GetElementPtrInst *NewGEP = GetElementPtrInst::Create(
        GEP->getSourceElementType(), NewPointerOperands[0],
        SmallVector<Value *, 4>(GEP->idx_begin(), GEP->idx_end()));
    NewGEP->setIsInBounds(GEP->isInBounds());
    return NewGEP;
  }
  case Instruction::Select:
    return SelectInst::Create(I->getOperand(0), NewPointerOperands[1],
                              NewPointerOperands[2], "", nullptr, I);
  default:
    llvm_unreachable("Unexpected opcode");
  }
}
static Value *cloneConstantExprWithNewAddressSpace(
    ConstantExpr *CE, unsigned NewAddrSpace,
    const ValueToValueMapTy &ValueWithNewAddrSpace) {
  Type *TargetType =
      CE->getType()->getPointerElementType()->getPointerTo(NewAddrSpace);

  if (CE->getOpcode() == Instruction::AddrSpaceCast) {
    // Because CE is flat, the source address space must already be specific; a
    // bitcast to the target type suffices.
    return ConstantExpr::getBitCast(CE->getOperand(0), TargetType);
  }

  if (CE->getOpcode() == Instruction::BitCast) {
    if (Value *NewOperand = ValueWithNewAddrSpace.lookup(CE->getOperand(0)))
      return ConstantExpr::getBitCast(cast<Constant>(NewOperand), TargetType);
    return ConstantExpr::getAddrSpaceCast(CE, TargetType);
  }

  // Compute the operands of the new constant expression. Operands that need a
  // new address space are already in ValueWithNewAddrSpace because constant
  // expressions are processed in postorder.
  SmallVector<Constant *, 4> NewOperands;
  for (unsigned Index = 0; Index < CE->getNumOperands(); ++Index) {
    Constant *Operand = CE->getOperand(Index);
    if (Value *NewOperand = ValueWithNewAddrSpace.lookup(Operand)) {
      NewOperands.push_back(cast<Constant>(NewOperand));
      continue;
    }
    // Otherwise, reuse the old operand.
    NewOperands.push_back(Operand);
  }

  if (CE->getOpcode() == Instruction::GetElementPtr) {
    // A getelementptr constant expression needs its source element type to be
    // specified explicitly.
    return CE->getWithOperands(
        NewOperands, TargetType, /*OnlyIfReduced=*/false,
        NewOperands[0]->getType()->getPointerElementType());
  }

  return CE->getWithOperands(NewOperands, TargetType);
}
// Returns a clone of V with its operands replaced as specified in
// ValueWithNewAddrSpace.
Value *InferAddressSpaces::cloneValueWithNewAddressSpace(
    Value *V, unsigned NewAddrSpace,
    const ValueToValueMapTy &ValueWithNewAddrSpace,
    SmallVectorImpl<const Use *> *UndefUsesToFix) const {
  // All values in Postorder are flat address expressions.
  assert(isAddressExpression(*V) &&
         V->getType()->getPointerAddressSpace() == FlatAddrSpace);

  if (Instruction *I = dyn_cast<Instruction>(V)) {
    Value *NewV = cloneInstructionWithNewAddressSpace(
        I, NewAddrSpace, ValueWithNewAddrSpace, UndefUsesToFix);
    if (Instruction *NewI = dyn_cast<Instruction>(NewV)) {
      if (NewI->getParent() == nullptr) {
        NewI->insertBefore(I);
        NewI->takeName(I);
      }
    }
    return NewV;
  }

  return cloneConstantExprWithNewAddressSpace(
      cast<ConstantExpr>(V), NewAddrSpace, ValueWithNewAddrSpace);
}
// Joins two address spaces on the lattice uninitialized > specific > flat.
unsigned InferAddressSpaces::joinAddressSpaces(unsigned AS1,
                                               unsigned AS2) const {
  if (AS1 == FlatAddrSpace || AS2 == FlatAddrSpace)
    return FlatAddrSpace;

  if (AS1 == UninitializedAddressSpace)
    return AS2;
  if (AS2 == UninitializedAddressSpace)
    return AS1;

  // The join of two different specific address spaces is flat.
  return (AS1 == AS2) ? AS1 : FlatAddrSpace;
}
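// Illustrative note (not part of the original source): joinAddressSpaces acts
// as the meet of a small lattice with UninitializedAddressSpace on top and
// FlatAddrSpace at the bottom. For example, if FlatAddrSpace were 0:
//
//   joinAddressSpaces(UninitializedAddressSpace, 3) == 3
//   joinAddressSpaces(3, 3)                         == 3
//   joinAddressSpaces(3, 5)                         == 0  // different specific
//                                                         // spaces -> flat
//   joinAddressSpaces(0, AS)                        == 0  // flat absorbs all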
bool InferAddressSpaces::runOnFunction(Function &F) {
  // ...
  const TargetTransformInfo &TTI =
      getAnalysis<TargetTransformInfoWrapperPass>().getTTI(F);

  if (FlatAddrSpace == UninitializedAddressSpace) {
    FlatAddrSpace = TTI.getFlatAddressSpace();
    if (FlatAddrSpace == UninitializedAddressSpace)
      return false;
  }

  // Collect all flat address expressions in postorder.
  std::vector<WeakTrackingVH> Postorder = collectFlatAddressExpressions(F);

  // Run a data-flow analysis to refine the address space of every expression
  // in Postorder.
  ValueToAddrSpaceMapTy InferredAddrSpace;
  inferAddressSpaces(Postorder, &InferredAddrSpace);

  // Rewrite the flat address expressions that were inferred to point into a
  // specific address space.
  return rewriteWithNewAddressSpaces(TTI, Postorder, InferredAddrSpace, &F);
}
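// Illustrative note (not part of the original source): on a target whose flat
// address space is 0 and where 3 is a specific space (numbers are only for
// the example), the three phases above turn
//
//   %p = addrspacecast float addrspace(3)* %lds to float*
//   %v = load float, float* %p
//
// into
//
//   %v = load float, float addrspace(3)* %lds
//
// because %p is inferred to point into address space 3 and the load's pointer
// operand can simply be replaced with the cloned, specific-space expression.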
void InferAddressSpaces::inferAddressSpaces(
    ArrayRef<WeakTrackingVH> Postorder,
    ValueToAddrSpaceMapTy *InferredAddrSpace) const {
  SetVector<Value *> Worklist(Postorder.begin(), Postorder.end());
  // Initially, all expressions are in the uninitialized address space.
  for (Value *V : Postorder)
    (*InferredAddrSpace)[V] = UninitializedAddressSpace;

  while (!Worklist.empty()) {
    Value *V = Worklist.pop_back_val();

    // Try to update the address space of V according to the address spaces of
    // its operands.
    LLVM_DEBUG(dbgs() << "Updating the address space of\n  " << *V << '\n');
    Optional<unsigned> NewAS = updateAddressSpace(*V, *InferredAddrSpace);
    if (!NewAS.hasValue())
      continue;

    (*InferredAddrSpace)[V] = NewAS.getValue();

    // If the update succeeded, queue V's users: their address spaces may now
    // change as well.
    for (Value *User : V->users()) {
      // Skip if User is already in the worklist.
      if (Worklist.count(User))
        continue;

      auto Pos = InferredAddrSpace->find(User);
      // Only the address spaces of flat address expressions, i.e. the entries
      // of InferredAddrSpace, are ever updated.
      if (Pos == InferredAddrSpace->end())
        continue;

      // updateAddressSpace only moves values down the lattice; skip users that
      // are already at the bottom (the flat address space).
      if (Pos->second == FlatAddrSpace)
        continue;

      Worklist.insert(User);
    }
  }
}
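// Illustrative note (not part of the original source): this worklist loop is
// an ordinary data-flow fixed point. Every expression starts at
// UninitializedAddressSpace (top); updateAddressSpace only ever moves a value
// down, to a specific address space or ultimately to FlatAddrSpace (bottom),
// and users already at the bottom are never re-queued, so each value is
// revisited only a bounded number of times and the iteration terminates.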
Optional<unsigned> InferAddressSpaces::updateAddressSpace(
    const Value &V, const ValueToAddrSpaceMapTy &InferredAddrSpace) const {
  assert(InferredAddrSpace.count(&V));

  // The new inferred address space equals the join of the address spaces of
  // all of V's pointer operands.
  unsigned NewAS = UninitializedAddressSpace;

  const Operator &Op = cast<Operator>(V);
  if (Op.getOpcode() == Instruction::Select) {
    Value *Src0 = Op.getOperand(1);
    Value *Src1 = Op.getOperand(2);

    auto I = InferredAddrSpace.find(Src0);
    unsigned Src0AS = (I != InferredAddrSpace.end()) ?
      I->second : Src0->getType()->getPointerAddressSpace();

    auto J = InferredAddrSpace.find(Src1);
    unsigned Src1AS = (J != InferredAddrSpace.end()) ?
      J->second : Src1->getType()->getPointerAddressSpace();

    auto *C0 = dyn_cast<Constant>(Src0);
    auto *C1 = dyn_cast<Constant>(Src1);

    // If one of the inputs is a constant, we may be able to do a constant
    // addrspacecast of it. Defer inferring the address space until the other
    // input's address space is known.
    if ((C1 && Src0AS == UninitializedAddressSpace) ||
        (C0 && Src1AS == UninitializedAddressSpace))
      return None;

    if (C0 && isSafeToCastConstAddrSpace(C0, Src1AS))
      NewAS = Src1AS;
    else if (C1 && isSafeToCastConstAddrSpace(C1, Src0AS))
      NewAS = Src0AS;
    else
      NewAS = joinAddressSpaces(Src0AS, Src1AS);
  } else {
    for (Value *PtrOperand : getPointerOperands(V)) {
      auto I = InferredAddrSpace.find(PtrOperand);
      unsigned OperandAS = I != InferredAddrSpace.end() ?
        I->second : PtrOperand->getType()->getPointerAddressSpace();

      // join(flat, *) = flat, so we can break if NewAS is already flat.
      NewAS = joinAddressSpaces(NewAS, OperandAS);
      if (NewAS == FlatAddrSpace)
        break;
    }
  }

  unsigned OldAS = InferredAddrSpace.lookup(&V);
  assert(OldAS != FlatAddrSpace);
  if (OldAS == NewAS)
    return None;
  return NewAS;
}
// Returns true if U is the pointer operand of a memory instruction with a
// single pointer operand that can have its address space changed by simply
// mutating the use to a new value. If the instruction is volatile, this is
// only allowed when the target supports a volatile variant in AddrSpace.
static bool isSimplePointerUseValidToReplace(const TargetTransformInfo &TTI,
                                             Use &U, unsigned AddrSpace) {
  User *Inst = U.getUser();
  unsigned OpNo = U.getOperandNo();
  bool VolatileIsAllowed = false;
  if (auto *I = dyn_cast<Instruction>(Inst))
    VolatileIsAllowed = TTI.hasVolatileVariant(I, AddrSpace);

  if (auto *LI = dyn_cast<LoadInst>(Inst))
    return OpNo == LoadInst::getPointerOperandIndex() &&
           (VolatileIsAllowed || !LI->isVolatile());

  if (auto *SI = dyn_cast<StoreInst>(Inst))
    return OpNo == StoreInst::getPointerOperandIndex() &&
           (VolatileIsAllowed || !SI->isVolatile());

  if (auto *RMW = dyn_cast<AtomicRMWInst>(Inst))
    return OpNo == AtomicRMWInst::getPointerOperandIndex() &&
           (VolatileIsAllowed || !RMW->isVolatile());

  if (auto *CmpX = dyn_cast<AtomicCmpXchgInst>(Inst))
    return OpNo == AtomicCmpXchgInst::getPointerOperandIndex() &&
           (VolatileIsAllowed || !CmpX->isVolatile());

  return false;
}
// Update memory intrinsic uses that require more complex processing than
// simple memory instructions. These require re-mangling and may have multiple
// pointer operands.
static bool handleMemIntrinsicPtrUse(MemIntrinsic *MI, Value *OldV,
                                     Value *NewV) {
  IRBuilder<> B(MI);
  MDNode *TBAA = MI->getMetadata(LLVMContext::MD_tbaa);
  MDNode *ScopeMD = MI->getMetadata(LLVMContext::MD_alias_scope);
  MDNode *NoAliasMD = MI->getMetadata(LLVMContext::MD_noalias);

  if (auto *MSI = dyn_cast<MemSetInst>(MI)) {
    B.CreateMemSet(NewV, MSI->getValue(),
                   MSI->getLength(), MSI->getDestAlignment(),
                   false, // isVolatile
                   TBAA, ScopeMD, NoAliasMD);
  } else if (auto *MTI = dyn_cast<MemTransferInst>(MI)) {
    Value *Src = MTI->getRawSource();
    Value *Dest = MTI->getRawDest();

    // Be careful in case this is a self-to-self copy.
    if (Src == OldV)
      Src = NewV;
    if (Dest == OldV)
      Dest = NewV;

    if (isa<MemCpyInst>(MTI)) {
      MDNode *TBAAStruct = MTI->getMetadata(LLVMContext::MD_tbaa_struct);
      B.CreateMemCpy(Dest, MTI->getDestAlignment(),
                     Src, MTI->getSourceAlignment(), MTI->getLength(),
                     false, // isVolatile
                     TBAA, TBAAStruct, ScopeMD, NoAliasMD);
    } else {
      assert(isa<MemMoveInst>(MTI));
      B.CreateMemMove(Dest, MTI->getDestAlignment(),
                      Src, MTI->getSourceAlignment(), MTI->getLength(),
                      false, // isVolatile
                      TBAA, ScopeMD, NoAliasMD);
    }
  } else
    llvm_unreachable("unhandled MemIntrinsic");

  MI->eraseFromParent();
  return true;
}
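// Illustrative note (not part of the original source): the pointer types of a
// memory intrinsic are mangled into its name, e.g.
//
//   call void @llvm.memcpy.p0i8.p0i8.i64(i8* %dst, i8* %src, i64 %n, i1 false)
//
// so changing an operand's address space in place would no longer match the
// declared intrinsic. handleMemIntrinsicPtrUse therefore emits a freshly
// mangled memset/memcpy/memmove through IRBuilder, carries the
// tbaa/alias.scope/noalias metadata across, and erases the old call.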
// Returns true if it is safe to change the address space of constant C to
// NewAS with a constant-expression addrspacecast.
bool InferAddressSpaces::isSafeToCastConstAddrSpace(Constant *C,
                                                    unsigned NewAS) const {
  assert(NewAS != UninitializedAddressSpace);

  unsigned SrcAS = C->getType()->getPointerAddressSpace();
  if (SrcAS == NewAS || isa<UndefValue>(C))
    return true;

  // Prevent illegal casts between different non-flat address spaces.
  if (SrcAS != FlatAddrSpace && NewAS != FlatAddrSpace)
    return false;

  if (isa<ConstantPointerNull>(C))
    return true;

  if (auto *Op = dyn_cast<Operator>(C)) {
    // If we already have a constant addrspacecast, it is safe to cast away
    // from its source instead.
    if (Op->getOpcode() == Instruction::AddrSpaceCast)
      return isSafeToCastConstAddrSpace(cast<Constant>(Op->getOperand(0)),
                                        NewAS);

    if (Op->getOpcode() == Instruction::IntToPtr &&
        Op->getType()->getPointerAddressSpace() == FlatAddrSpace)
      return true;
  }

  return false;
}
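// Illustrative note (not part of the original source): undef can be re-pointed
// at any address space; a null pointer, or an inttoptr constant whose result
// is a flat pointer, can be recast as long as one side of the cast is flat;
// and a constant addrspacecast is handled by asking the same question of its
// source. Any other constant pointer in one specific space cannot safely be
// claimed to live in a different specific space, hence the default of false.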
// Skips ahead past any further uses that belong to the same user.
static Value::use_iterator skipToNextUser(Value::use_iterator I,
                                          Value::use_iterator End) {
  User *CurUser = I->getUser();
  ++I;

  while (I != End && I->getUser() == CurUser)
    ++I;

  return I;
}
bool InferAddressSpaces::rewriteWithNewAddressSpaces(
    const TargetTransformInfo &TTI, ArrayRef<WeakTrackingVH> Postorder,
    const ValueToAddrSpaceMapTy &InferredAddrSpace, Function *F) const {
  // For each address expression to be modified, create a clone of it with its
  // pointer operands converted to the new address space. Since the pointer
  // operands are converted, the clone is naturally in the new address space by
  // construction.
  ValueToValueMapTy ValueWithNewAddrSpace;
  SmallVector<const Use *, 32> UndefUsesToFix;
  for (Value *V : Postorder) {
    unsigned NewAddrSpace = InferredAddrSpace.lookup(V);
    if (V->getType()->getPointerAddressSpace() != NewAddrSpace) {
      ValueWithNewAddrSpace[V] = cloneValueWithNewAddressSpace(
          V, NewAddrSpace, ValueWithNewAddrSpace, &UndefUsesToFix);
    }
  }

  if (ValueWithNewAddrSpace.empty())
    return false;

  // Fix all the undef uses generated by cloneInstructionWithNewAddressSpace.
  for (const Use *UndefUse : UndefUsesToFix) {
    User *V = UndefUse->getUser();
    User *NewV = cast<User>(ValueWithNewAddrSpace.lookup(V));
    unsigned OperandNo = UndefUse->getOperandNo();
    assert(isa<UndefValue>(NewV->getOperand(OperandNo)));
    NewV->setOperand(OperandNo, ValueWithNewAddrSpace.lookup(UndefUse->get()));
  }

  SmallVector<Instruction *, 16> DeadInstructions;

  // Replace the uses of the old address expressions with the new ones.
  for (const WeakTrackingVH &WVH : Postorder) {
    assert(WVH && "value was unexpectedly deleted");
    Value *V = WVH;
    Value *NewV = ValueWithNewAddrSpace.lookup(V);
    if (NewV == nullptr)
      continue;

    LLVM_DEBUG(dbgs() << "Replacing the uses of " << *V << "\n  with\n  "
                      << *NewV << '\n');

    if (Constant *C = dyn_cast<Constant>(V)) {
      Constant *Replace =
          ConstantExpr::getAddrSpaceCast(cast<Constant>(NewV), C->getType());
      if (C != Replace) {
        LLVM_DEBUG(dbgs() << "Inserting replacement const cast: " << Replace
                          << ": " << *Replace << '\n');
        C->replaceAllUsesWith(Replace);
        V = Replace;
      }
    }

    for (Value::use_iterator I = V->use_begin(), E = V->use_end(); I != E;) {
      Use &U = *I;
      // Some users may see the same pointer operand in multiple operands; skip
      // to the next user.
      I = skipToNextUser(I, E);

      if (isSimplePointerUseValidToReplace(
              TTI, U, V->getType()->getPointerAddressSpace())) {
        // If V is used as the pointer operand of a compatible memory
        // operation, set the pointer operand to NewV.
        U.set(NewV);
        continue;
      }

      User *CurUser = U.getUser();
      // Handle more complex cases like intrinsics that need to be remangled.
      if (auto *MI = dyn_cast<MemIntrinsic>(CurUser)) {
        if (!MI->isVolatile() && handleMemIntrinsicPtrUse(MI, V, NewV))
          continue;
      }

      if (auto *II = dyn_cast<IntrinsicInst>(CurUser)) {
        if (rewriteIntrinsicOperands(II, V, NewV))
          continue;
      }

      if (isa<Instruction>(CurUser)) {
        if (ICmpInst *Cmp = dyn_cast<ICmpInst>(CurUser)) {
          // If both operands are (or can be cast into) the new address space,
          // the comparison can be performed there directly.
          unsigned NewAS = NewV->getType()->getPointerAddressSpace();
          int SrcIdx = U.getOperandNo();
          int OtherIdx = (SrcIdx == 0) ? 1 : 0;
          Value *OtherSrc = Cmp->getOperand(OtherIdx);

          if (Value *OtherNewV = ValueWithNewAddrSpace.lookup(OtherSrc)) {
            if (OtherNewV->getType()->getPointerAddressSpace() == NewAS) {
              Cmp->setOperand(OtherIdx, OtherNewV);
              Cmp->setOperand(SrcIdx, NewV);
              continue;
            }
          }

          // Even if the type mismatches, we can cast the constant.
          if (auto *KOtherSrc = dyn_cast<Constant>(OtherSrc)) {
            if (isSafeToCastConstAddrSpace(KOtherSrc, NewAS)) {
              Cmp->setOperand(SrcIdx, NewV);
              Cmp->setOperand(OtherIdx,
                  ConstantExpr::getAddrSpaceCast(KOtherSrc, NewV->getType()));
              continue;
            }
          }
        }

        if (AddrSpaceCastInst *ASC = dyn_cast<AddrSpaceCastInst>(CurUser)) {
          unsigned NewAS = NewV->getType()->getPointerAddressSpace();
          if (ASC->getDestAddressSpace() == NewAS) {
            if (ASC->getType()->getPointerElementType() !=
                NewV->getType()->getPointerElementType()) {
              NewV = CastInst::Create(Instruction::BitCast, NewV,
                                      ASC->getType(), "", ASC);
            }
            ASC->replaceAllUsesWith(NewV);
            DeadInstructions.push_back(ASC);
            continue;
          }
        }

        // Otherwise, replace the use with an addrspacecast of NewV back to the
        // flat type, inserted after any leading PHI nodes.
        if (Instruction *VInst = dyn_cast<Instruction>(V)) {
          BasicBlock::iterator InsertPos = std::next(VInst->getIterator());
          while (isa<PHINode>(InsertPos))
            ++InsertPos;
          U.set(new AddrSpaceCastInst(NewV, V->getType(), "", &*InsertPos));
        } else {
          U.set(ConstantExpr::getAddrSpaceCast(cast<Constant>(NewV),
                                               V->getType()));
        }
      }
    }

    if (V->use_empty()) {
      if (Instruction *VInst = dyn_cast<Instruction>(V))
        DeadInstructions.push_back(VInst);
    }
  }

  for (Instruction *I : DeadInstructions)
    RecursivelyDeleteTriviallyDeadInstructions(I);

  return true;
}
FunctionPass *llvm::createInferAddressSpacesPass() {
  return new InferAddressSpaces();
}
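// A minimal usage sketch (not part of the original file): running this pass
// standalone through the legacy pass manager. The TargetMachine TM is assumed
// to exist already; it supplies the TargetTransformInfo whose
// getFlatAddressSpace() the pass queries before doing any work.
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/IR/LegacyPassManager.h"
#include "llvm/IR/Module.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Transforms/Scalar.h"

static void runInferAddressSpaces(llvm::Module &M, llvm::TargetMachine &TM) {
  llvm::legacy::PassManager PM;
  // Build TTI from the real target so address-space information is available.
  PM.add(llvm::createTargetTransformInfoWrapperPass(TM.getTargetIRAnalysis()));
  PM.add(llvm::createInferAddressSpacesPass());
  PM.run(M);
}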