// Constructor body of AArch64LegalizerInfo(const AArch64Subtarget &ST). The
// LLT shorthands used below (p0, s1, s8, ..., s512, v2s32, v4s32, v2s64, ...)
// are defined earlier in the constructor via LLT::pointer(), LLT::scalar() and
// LLT::vector(); those definitions are not part of this excerpt.
  using namespace TargetOpcode;
  getActionDefinitionsBuilder(G_IMPLICIT_DEF)
      .legalFor({p0, s1, s8, s16, s32, s64, v2s64})
      .clampScalar(0, s1, s64)
      .widenScalarToNextPow2(0, 8)
      // Any vector other than v2s64 is narrowed to its element type.
      .fewerElementsIf(
          [=](const LegalityQuery &Query) {
            return Query.Types[0].isVector() &&
                   (Query.Types[0].getElementType() != s64 ||
                    Query.Types[0].getNumElements() != 2);
          },
          [=](const LegalityQuery &Query) {
            LLT EltTy = Query.Types[0].getElementType();
            return std::make_pair(0, EltTy);
          });
  getActionDefinitionsBuilder(G_PHI)
      .legalFor({p0, s16, s32, s64})
      .clampScalar(0, s16, s64)
      .widenScalarToNextPow2(0);
  getActionDefinitionsBuilder(G_BSWAP)
      // ...
      .clampScalar(0, s16, s64)
      .widenScalarToNextPow2(0);
  getActionDefinitionsBuilder({G_ADD, G_SUB, G_MUL, G_AND, G_OR, G_XOR, G_SHL})
      .legalFor({s32, s64, v2s32, v4s32, v2s64})
      .clampScalar(0, s32, s64)
      .widenScalarToNextPow2(0)
      .clampNumElements(0, v2s32, v4s32)
      .clampNumElements(0, v2s64, v2s64)
      .moreElementsToNextPow2(0);
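  // Illustrative note (not part of the original file): when the legalizer
  // queries the rule set above for an operation it cannot handle as written,
  // the builder chain answers with a concrete action. For example, assuming a
  // LegalizerInfo reference `LI` built by this constructor, an s8 G_ADD is
  // reported as needing its scalar type widened to s32, which is exactly what
  // clampScalar(0, s32, s64) requests:
  //
  //   LegalizeActionStep Step =
  //       LI.getAction({TargetOpcode::G_ADD, {LLT::scalar(8)}});
  //   // Step.Action  == LegalizeActions::WidenScalar
  //   // Step.TypeIdx == 0, Step.NewType == LLT::scalar(32)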
  getActionDefinitionsBuilder(G_GEP)
      .legalFor({{p0, s64}})
      .clampScalar(1, s64, s64);

  getActionDefinitionsBuilder(G_PTR_MASK).legalFor({p0});
  getActionDefinitionsBuilder({G_LSHR, G_ASHR, G_SDIV, G_UDIV})
      // ...
      .clampScalar(0, s32, s64)
      .widenScalarToNextPow2(0);
  getActionDefinitionsBuilder({G_SREM, G_UREM})
      .lowerFor({s1, s8, s16, s32, s64});

  getActionDefinitionsBuilder({G_SMULO, G_UMULO})
      .lowerFor({{s64, s1}});

  getActionDefinitionsBuilder({G_SMULH, G_UMULH}).legalFor({s32, s64});

  getActionDefinitionsBuilder({G_UADDE, G_USUBE, G_SADDO, G_SSUBO})
      .legalFor({{s32, s1}, {s64, s1}});

  getActionDefinitionsBuilder({G_FADD, G_FSUB, G_FMA, G_FMUL, G_FDIV})
      .legalFor({s32, s64});

  getActionDefinitionsBuilder({G_FREM, G_FPOW}).libcallFor({s32, s64});
  getActionDefinitionsBuilder(G_FCEIL)
      // ...
      .legalFor({s16, s32, s64, v2s32, v4s32, v2s64});
  getActionDefinitionsBuilder(G_INSERT)
      .unsupportedIf([=](const LegalityQuery &Query) {
        return Query.Types[0].getSizeInBits() <=
               Query.Types[1].getSizeInBits();
      })
      .legalIf([=](const LegalityQuery &Query) {
        const LLT &Ty0 = Query.Types[0];
        if (Ty0 != s32 && Ty0 != s64 && Ty0 != p0)
          return false;
        // ...
      })
      .clampScalar(0, s32, s64)
      .widenScalarToNextPow2(0)
      .maxScalarIf(typeInSet(0, {s32}), 1, s16)
      .maxScalarIf(typeInSet(0, {s64}), 1, s32)
      .widenScalarToNextPow2(1);
  getActionDefinitionsBuilder(G_EXTRACT)
      .unsupportedIf([=](const LegalityQuery &Query) {
        return Query.Types[0].getSizeInBits() >=
               Query.Types[1].getSizeInBits();
      })
      .legalIf([=](const LegalityQuery &Query) {
        const LLT &Ty1 = Query.Types[1];
        if (Ty1 != s32 && Ty1 != s64)
          return false;
        // ...
      })
      .clampScalar(1, s32, s64)
      .widenScalarToNextPow2(1)
      .maxScalarIf(typeInSet(1, {s32}), 0, s16)
      .maxScalarIf(typeInSet(1, {s64}), 0, s32)
      .widenScalarToNextPow2(0);
  getActionDefinitionsBuilder({G_SEXTLOAD, G_ZEXTLOAD})
      .legalForTypesWithMemSize({{s32, p0, 8},
                                 /* ... */})
      .clampScalar(0, s32, s64)
      .widenScalarToNextPow2(0)
      .unsupportedIfMemSizeNotPow2()
      /* ... */;
  getActionDefinitionsBuilder(G_LOAD)
      .legalForTypesWithMemSize({{s8, p0, 8},
                                 /* ... */})
      // These extending loads are also directly legal.
      .legalForTypesWithMemSize({{s32, p0, 8},
                                 /* ... */})
      .clampScalar(0, s8, s64)
      .widenScalarToNextPow2(0)
      .unsupportedIfMemSizeNotPow2()
      // Lower any remaining load whose result is wider than the memory access.
      .lowerIf([=](const LegalityQuery &Query) {
        return Query.Types[0].getSizeInBits() !=
               Query.MMODescrs[0].SizeInBits;
      })
      .clampNumElements(0, v2s32, v2s32)
      .clampMaxNumElements(0, s64, 1);
  getActionDefinitionsBuilder(G_STORE)
      .legalForTypesWithMemSize({{s8, p0, 8},
                                 /* ... */})
      .clampScalar(0, s8, s64)
      .widenScalarToNextPow2(0)
      .unsupportedIfMemSizeNotPow2()
      // Lower truncating stores: a scalar value wider than the memory access.
      .lowerIf([=](const LegalityQuery &Query) {
        return Query.Types[0].isScalar() &&
               Query.Types[0].getSizeInBits() !=
                   Query.MMODescrs[0].SizeInBits;
      })
      .clampNumElements(0, v2s32, v2s32)
      .clampMaxNumElements(0, s64, 1);
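  // Illustrative note (not from the original file): the lowerIf predicate
  // above is what catches truncating stores. A store such as
  //   G_STORE %val:_(s32), %ptr:_(p0) :: (store 2)
  // has Types[0].getSizeInBits() == 32 but MMODescrs[0].SizeInBits == 16, so
  // it is not accepted as legal and is instead rewritten by the lowering step.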
  getActionDefinitionsBuilder(G_CONSTANT)
      .legalFor({p0, s32, s64})
      .clampScalar(0, s32, s64)
      .widenScalarToNextPow2(0);
  getActionDefinitionsBuilder(G_FCONSTANT)
      .legalFor({s32, s64})
      .clampScalar(0, s32, s64);
  getActionDefinitionsBuilder(G_ICMP)
      .legalFor({{s32, s32}, {s32, s64}, {s32, p0}})
      .clampScalar(0, s32, s32)
      .clampScalar(1, s32, s64)
      .widenScalarToNextPow2(1);

  getActionDefinitionsBuilder(G_FCMP)
      .legalFor({{s32, s32}, {s32, s64}})
      .clampScalar(0, s32, s32)
      .clampScalar(1, s32, s64)
      .widenScalarToNextPow2(1);
  getActionDefinitionsBuilder({G_ZEXT, G_SEXT, G_ANYEXT})
      .legalForCartesianProduct({s8, s16, s32, s64}, {s1, s8, s16, s32});

  getActionDefinitionsBuilder(G_FPTRUNC).legalFor(
      {{s16, s32}, {s16, s64}, {s32, s64}});
  getActionDefinitionsBuilder(G_FPEXT).legalFor(
      {{s32, s16}, {s64, s16}, {s64, s32}});
  getActionDefinitionsBuilder({G_FPTOSI, G_FPTOUI})
      .legalForCartesianProduct({s32, s64})
      .clampScalar(0, s32, s64)
      .widenScalarToNextPow2(0)
      .clampScalar(1, s32, s64)
      .widenScalarToNextPow2(1);

  getActionDefinitionsBuilder({G_SITOFP, G_UITOFP})
      .legalForCartesianProduct({s32, s64})
      .clampScalar(1, s32, s64)
      .widenScalarToNextPow2(1)
      .clampScalar(0, s32, s64)
      .widenScalarToNextPow2(0);
  getActionDefinitionsBuilder(G_BRCOND).legalFor({s1, s8, s16, s32});
  getActionDefinitionsBuilder(G_BRINDIRECT).legalFor({p0});

  getActionDefinitionsBuilder(G_SELECT)
      .legalFor({{s32, s1}, {s64, s1}, {p0, s1}})
      .clampScalar(0, s32, s64)
      .widenScalarToNextPow2(0);

  getActionDefinitionsBuilder(G_FRAME_INDEX).legalFor({p0});
  getActionDefinitionsBuilder(G_GLOBAL_VALUE).legalFor({p0});
  getActionDefinitionsBuilder(G_PTRTOINT)
      .legalForCartesianProduct({s1, s8, s16, s32, s64}, {p0})
      // ...
      .widenScalarToNextPow2(0, 8);

  getActionDefinitionsBuilder(G_INTTOPTR)
      .unsupportedIf([=](const LegalityQuery &Query) {
        return Query.Types[0].getSizeInBits() !=
               Query.Types[1].getSizeInBits();
      })
      .legalFor({{p0, s64}});
  getActionDefinitionsBuilder(G_BITCAST)
      // ...
      .legalForCartesianProduct({s1, s8, s16, s32, s64, s128, v16s8, v8s8, v4s8,
                                 v8s16, v4s16, v2s16, v4s32, v2s32, v2s64});

  getActionDefinitionsBuilder(G_VASTART).legalFor({p0});

  // G_VAARG is handled by a custom routine (legalizeVaArg, dispatched from
  // legalizeCustom below).
  getActionDefinitionsBuilder(G_VAARG)
      .customForCartesianProduct({s8, s16, s32, s64, p0}, {p0})
      .clampScalar(0, s8, s64)
      .widenScalarToNextPow2(0, 8);
  getActionDefinitionsBuilder(G_ATOMIC_CMPXCHG_WITH_SUCCESS)
      /* ... */;

  getActionDefinitionsBuilder(
      {G_ATOMICRMW_XCHG, G_ATOMICRMW_ADD, G_ATOMICRMW_SUB, G_ATOMICRMW_AND,
       G_ATOMICRMW_OR, G_ATOMICRMW_XOR, G_ATOMICRMW_MIN, G_ATOMICRMW_MAX,
       G_ATOMICRMW_UMIN, G_ATOMICRMW_UMAX, G_ATOMIC_CMPXCHG})
      /* ... */;

  getActionDefinitionsBuilder(G_BLOCK_ADDR).legalFor({p0});
  for (unsigned Op : {G_MERGE_VALUES, G_UNMERGE_VALUES}) {
    // G_MERGE_VALUES has the wide type at index 0 and the narrow type at
    // index 1; for G_UNMERGE_VALUES the two indices are swapped.
    unsigned BigTyIdx = Op == G_MERGE_VALUES ? 0 : 1;
    unsigned LitTyIdx = Op == G_MERGE_VALUES ? 1 : 0;

    // Predicate used by the rules below: true when Types[TypeIdx] is a vector
    // with an element type these ops cannot take.
    auto notValidElt = [](const LegalityQuery &Query, unsigned TypeIdx) {
      const LLT &Ty = Query.Types[TypeIdx];
      // ...
    };
    // ...
      const LLT &Ty = Query.Types[TypeIdx];
    // ...
    getActionDefinitionsBuilder(Op)
        // Scalarize vectors whose element type is not valid for these ops.
        .fewerElementsIf(
            [=](const LegalityQuery &Query) { return notValidElt(Query, 0); },
            /* ... */)
        .fewerElementsIf(
            [=](const LegalityQuery &Query) { return notValidElt(Query, 1); },
            /* ... */)
        // ...
        .clampScalar(BigTyIdx, s8, s512)
        .widenScalarIf(
            [=](const LegalityQuery &Query) {
              const LLT &Ty = Query.Types[BigTyIdx];
              // ...
            },
            [=](const LegalityQuery &Query) {
              // Widen the big type to the next size the target can handle.
              const LLT &Ty = Query.Types[BigTyIdx];
              unsigned NewSizeInBits = 1 << Log2_32_Ceil(Ty.getSizeInBits() + 1);
              if (NewSizeInBits >= 256) {
                // ...
                if (RoundedTo < NewSizeInBits)
                  NewSizeInBits = RoundedTo;
              }
              return std::make_pair(BigTyIdx, LLT::scalar(NewSizeInBits));
            })
        .clampScalar(LitTyIdx, s8, s256)
        .widenScalarToNextPow2(LitTyIdx, 8)
        .legalIf([=](const LegalityQuery &Query) {
          const LLT &BigTy = Query.Types[BigTyIdx];
          const LLT &LitTy = Query.Types[LitTyIdx];
          // ...
        })
        // Any vector types that remain are scalarized.
        .fewerElementsIf(
            [](const LegalityQuery &Query) { return true; },
            [](const LegalityQuery &Query) {
              return std::make_pair(
                  0, Query.Types[0].getElementType());
            })
        .fewerElementsIf(
            [](const LegalityQuery &Query) { return true; },
            [](const LegalityQuery &Query) {
              return std::make_pair(
                  1, Query.Types[1].getElementType());
            });
  }
  getActionDefinitionsBuilder(G_EXTRACT_VECTOR_ELT)
      .unsupportedIf([=](const LegalityQuery &Query) {
        const LLT &EltTy = Query.Types[1].getElementType();
        return Query.Types[0] != EltTy;
      })
      // ...
      .legalIf([=](const LegalityQuery &Query) {
        const LLT &VecTy = Query.Types[1];
        return VecTy == v4s32 || VecTy == v2s64;
      });
  getActionDefinitionsBuilder(G_BUILD_VECTOR)
      .legalFor({{v4s32, s32}, {v2s64, s64}})
      .clampNumElements(0, v4s32, v4s32)
      .clampNumElements(0, v2s64, v2s64)
      // Wider source scalars are implicitly truncated into the element type.
      .legalIf([=](const LegalityQuery &Query) {
        return Query.Types[0].getScalarSizeInBits() <
               Query.Types[1].getSizeInBits();
      })
      .minScalarSameAs(1, 0);
// In AArch64LegalizerInfo::legalizeCustom(), G_VAARG is handed off to a
// dedicated helper:
  case TargetOpcode::G_VAARG:
    return legalizeVaArg(MI, MRI, MIRBuilder);
// Inside AArch64LegalizerInfo::legalizeVaArg():
  if (Align > PtrSize) {
    // Realign the list pointer to the requested alignment.
    auto AlignMinus1 = MIRBuilder.buildConstant(IntPtrTy, Align - 1);
    // ...
    MIRBuilder.buildGEP(ListTmp, List, AlignMinus1->getOperand(0).getReg());
    // ...
  }
  // ...
  // Load the argument value through the (possibly realigned) pointer.
  MIRBuilder.buildLoad(
      Dst, DstPtr,
      *MF.getMachineMemOperand(MachinePointerInfo(), MachineMemOperand::MOLoad,
                               ValSize, std::max(Align, PtrSize)));
  // ...
  // Advance the list pointer past the slot that was just read.
  MIRBuilder.buildGEP(NewList, DstPtr, SizeReg);
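// A hedged sketch (reconstructed, not copied from the file) of the overall
// G_VAARG lowering the fragments above belong to, assuming the AAPCS64 va_list
// for these types is a plain pointer that is bumped past each argument slot:
//
//   1. Load the current list pointer:   List = *ListPtr.
//   2. If Align > PtrSize, realign it:  add Align - 1 (buildConstant +
//      buildGEP above), then clear the low bits with a G_PTR_MASK to get
//      DstPtr; otherwise DstPtr is just List.
//   3. Load the value:                  Dst = *DstPtr, ValSize bytes.
//   4. Advance the pointer by the slot size and store it back for the next
//      va_arg (the buildGEP(NewList, DstPtr, SizeReg) call above, followed by
//      a G_STORE to ListPtr).
//   5. Erase the original G_VAARG instruction.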