27 bool MipsCallLowering::MipsHandler::assign(unsigned VReg,
30 assignValueToReg(VReg, VA);
32 assignValueToAddress(VReg, VA);
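// MipsHandler::assign(): judging from the two calls above, a single virtual
// register is dispatched according to its CCValAssign, either to a physical
// register via assignValueToReg() or to a stack location via
// assignValueToAddress().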
41 unsigned ArgLocsStartIndex) {
42 for (unsigned i = 0; i < VRegs.size(); ++i)
43 if (!assign(VRegs[i], ArgLocs[ArgLocsStartIndex + i]))
50 if (!MIRBuilder.getMF().getDataLayout().isLittleEndian())
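// assignVRegs() (lines 41-43) walks the split pieces in step with their
// CCValAssign slots. The isLittleEndian() check above appears to guard the
// big-endian case, where setLeastSignificantFirst() reverses the piece order
// so that the least significant piece is handled first.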
58 const Function &F = MIRBuilder.getMF().getFunction();
61 MIRBuilder.getMF().getSubtarget().getTargetLowering());
63 for (unsigned ArgsIndex = 0, ArgLocsIndex = 0; ArgsIndex < Args.size();
64      ++ArgsIndex, ArgLocsIndex += SplitLength) {
68 if (SplitLength > 1) {
72 for (unsigned i = 0; i < SplitLength; ++i)
75 if (!handleSplit(VRegs, ArgLocs, ArgLocsIndex, Args[ArgsIndex].Reg))
78 if (!assign(Args[ArgsIndex].Reg, ArgLocs[ArgLocsIndex]))
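// This loop (lines 63-78) appears to be the core of MipsHandler::handle():
// it iterates over the analyzed arguments, advancing ArgLocsIndex by
// SplitLength each time; arguments split across several locations
// (SplitLength > 1) go through handleSplit(), everything else is assigned
// directly with assign().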
92 void assignValueToReg(unsigned ValVReg, const CCValAssign &VA) override;
97 void assignValueToAddress(unsigned ValVReg, const CCValAssign &VA) override;
101 unsigned ArgsReg) override;
103 virtual void markPhysRegUsed(unsigned PhysReg) {
107 void buildLoad(unsigned Val, const CCValAssign &VA) {
109 unsigned Addr = getStackAddress(VA, MMO);
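// buildLoad() is a small helper; a minimal sketch of what it plausibly does,
// assuming getStackAddress() also fills in the MachineMemOperand:
//
//   void buildLoad(unsigned Val, const CCValAssign &VA) {
//     MachineMemOperand *MMO;
//     unsigned Addr = getStackAddress(VA, MMO); // address of the stack slot
//     MIRBuilder.buildLoad(Val, Addr, *MMO);    // Val = G_LOAD Addr
//   }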
114 class CallReturnHandler : public IncomingValueHandler {
118 : IncomingValueHandler(MIRBuilder, MRI), MIB(MIB) {}
121 void markPhysRegUsed(unsigned PhysReg) override {
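// CallReturnHandler reuses the incoming-value logic for call results; its
// markPhysRegUsed() override presumably records the physical register as an
// implicit use on the call instruction (the MIB member) instead of adding it
// as a block live-in the way formal arguments are handled.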
130 void IncomingValueHandler::assignValueToReg(unsigned ValVReg,
134 case CCValAssign::LocInfo::SExt:
135 case CCValAssign::LocInfo::ZExt:
136 case CCValAssign::LocInfo::AExt: {
137 auto Copy = MIRBuilder.buildCopy(LLT{VA.getLocVT()}, PhysReg);
138 MIRBuilder.buildTrunc(ValVReg, Copy);
142 MIRBuilder.buildCopy(ValVReg, PhysReg);
145 markPhysRegUsed(PhysReg);
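// For values promoted to a wider location type (the SExt/ZExt/AExt cases
// above), the physical register is copied at its LocVT width and then
// truncated back to the requested type; the plain copy on line 142 handles
// the unpromoted case. markPhysRegUsed() then records the register as used.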
148 unsigned IncomingValueHandler::getStackAddress(const CCValAssign &VA,
160 unsigned AddrReg = MRI.createGenericVirtualRegister(LLT::pointer(0, 32));
161 MIRBuilder.buildFrameIndex(AddrReg, FI);
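// IncomingValueHandler::getStackAddress() appears to create a fixed frame
// object for the incoming stack slot, materialize its address into a 32-bit
// pointer vreg with G_FRAME_INDEX (lines 160-161), and hand back a
// MachineMemOperand describing the access.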
166 void IncomingValueHandler::assignValueToAddress(unsigned ValVReg,
171 unsigned LoadReg = MRI.createGenericVirtualRegister(LLT::scalar(32));
172 buildLoad(LoadReg, VA);
173 MIRBuilder.buildTrunc(ValVReg, LoadReg);
175 buildLoad(ValVReg, VA);
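// Stack-passed values that were promoted are loaded at the full 32-bit
// location width and then truncated to the requested type (lines 171-173);
// unpromoted values are evidently loaded directly (line 175).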
180 unsigned ArgLocsStartIndex,
182 if (!assignVRegs(VRegs, ArgLocs, ArgLocsStartIndex))
184 setLeastSignificantFirst(VRegs);
185 MIRBuilder.buildMerge(ArgsReg, VRegs);
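// IncomingValueHandler::handleSplit() reassembles a value that the calling
// convention split across several locations. A minimal sketch of the idea
// for a 64-bit argument arriving in two 32-bit pieces (register names are
// hypothetical):
//
//   unsigned Lo = ..., Hi = ...;              // s32 pieces, one per CCValAssign
//   // big-endian targets reverse the pieces first, then:
//   MIRBuilder.buildMerge(ArgsReg, {Lo, Hi}); // ArgsReg = G_MERGE_VALUES Lo, Hi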
197 void assignValueToReg(unsigned ValVReg, const CCValAssign &VA) override;
202 void assignValueToAddress(unsigned ValVReg, const CCValAssign &VA) override;
206 unsigned ArgsReg) override;
208 unsigned extendRegister(unsigned ValReg, const CCValAssign &VA);
214 void OutgoingValueHandler::assignValueToReg(unsigned ValVReg,
217 unsigned ExtReg = extendRegister(ValVReg, VA);
218 MIRBuilder.buildCopy(PhysReg, ExtReg);
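// On the outgoing side the order is reversed: the value is first widened to
// its location type with extendRegister() and the result is then copied into
// the physical argument or return register.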
222 unsigned OutgoingValueHandler::getStackAddress(const CCValAssign &VA,
226 unsigned SPReg = MRI.createGenericVirtualRegister(p0);
227 MIRBuilder.buildCopy(SPReg, Mips::SP);
229 unsigned OffsetReg = MRI.createGenericVirtualRegister(s32);
231 MIRBuilder.buildConstant(OffsetReg, Offset);
233 unsigned AddrReg = MRI.createGenericVirtualRegister(p0);
234 MIRBuilder.buildGEP(AddrReg, SPReg, OffsetReg);
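// Outgoing stack arguments are addressed relative to the stack pointer: copy
// Mips::SP into a pointer vreg, materialize the slot's byte offset as a
// constant, and add the two with G_GEP to form the store address.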
245 void OutgoingValueHandler::assignValueToAddress(unsigned ValVReg,
248 unsigned Addr = getStackAddress(VA, MMO);
249 unsigned ExtReg = extendRegister(ValVReg, VA);
250 MIRBuilder.buildStore(ExtReg, Addr, *MMO);
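// A stack-passed outgoing value is extended to its location type and stored
// through the address computed above, using the MachineMemOperand produced
// by getStackAddress().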
253 unsigned OutgoingValueHandler::extendRegister(unsigned ValReg,
258 unsigned ExtReg = MRI.createGenericVirtualRegister(LocTy);
259 MIRBuilder.buildSExt(ExtReg, ValReg);
263 unsigned ExtReg = MRI.createGenericVirtualRegister(LocTy);
264 MIRBuilder.buildZExt(ExtReg, ValReg);
268 unsigned ExtReg = MRI.createGenericVirtualRegister(LocTy);
269 MIRBuilder.buildAnyExt(ExtReg, ValReg);
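// extendRegister() picks the extension matching the CCValAssign's LocInfo:
// G_SEXT for SExt, G_ZEXT for ZExt and G_ANYEXT for AExt, each into a fresh
// vreg of the location type; for a Full assignment the original register is
// presumably returned unchanged.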
283 unsigned ArgLocsStartIndex,
285 MIRBuilder.buildUnmerge(VRegs, ArgsReg);
286 setLeastSignificantFirst(VRegs);
287 if (!assignVRegs(VRegs, ArgLocs, ArgLocsStartIndex))
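// The outgoing counterpart of handleSplit(): the wide value is broken into
// pieces with G_UNMERGE_VALUES, the pieces are reordered for big-endian
// targets, and each piece is then assigned to its own location through
// assignVRegs().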
308 return CCValAssign::LocInfo::SExt;
310 return CCValAssign::LocInfo::ZExt;
311 return CCValAssign::LocInfo::AExt;
314 template <typename T>
317 for (unsigned i = 0; i < ArgLocs.size(); ++i) {
320 Arguments[i].VT, Arguments[i].ArgVT, Arguments[i].Flags);
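// determineLocInfo() (lines 308-311) maps an argument's extension flags to a
// CCValAssign::LocInfo, and the setLocInfo() template appears to stamp that
// LocInfo onto each assigned location so the handlers above know which
// extension or truncation to apply.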
340 if (!VRegs.empty()) {
350 "For each split Type there should be exactly one VReg.");
355 for (unsigned i = 0; i < SplitEVTs.size(); ++i) {
356 ArgInfo CurArgInfo = ArgInfo{VRegs[i], SplitEVTs[i].getTypeForEVT(Ctx)};
358 splitToValueTypes(CurArgInfo, 0, RetInfos, OrigArgIndices);
362 subTargetRegTypeForCallingConv(F, RetInfos, OrigArgIndices, Outs);
370 OutgoingValueHandler RetHandler(MIRBuilder, MF.getRegInfo(), Ret);
371 if (!RetHandler.handle(ArgLocs, RetInfos)) {
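// lowerReturn(): the returned IR value is split into legal pieces
// (ComputeValueVTs / splitToValueTypes), the pieces are analyzed against the
// Mips return convention, and an OutgoingValueHandler copies them onto the
// return instruction Ret; the branch above presumably bails out when a piece
// cannot be handled.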
406 splitToValueTypes(AInfo, i, ArgInfos, OrigArgIndices);
411 subTargetRegTypeForCallingConv(F, ArgInfos, OrigArgIndices, Ins);
425 IncomingValueHandler Handler(MIRBuilder, MF.getRegInfo());
426 if (!Handler.handle(ArgLocs, ArgInfos))
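// lowerFormalArguments() mirrors lowerReturn(): each formal argument is
// split to value types, the splits are analyzed against the Mips calling
// convention, and an IncomingValueHandler copies or loads every piece into
// the VRegs supplied for the function's arguments.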
441 for (auto &Arg : OrigArgs) {
444 if (Arg.Flags.isByVal() || Arg.Flags.isSRet())
458 MIRBuilder.buildInstr(Mips::ADJCALLSTACKDOWN);
472 FuncOrigArgs.reserve(OrigArgs.size());
477 for (auto &Arg : OrigArgs) {
481 FuncOrigArgs.push_back(Entry);
483 splitToValueTypes(Arg, i, ArgInfos, OrigArgIndices);
488 subTargetRegTypeForCallingConv(F, ArgInfos, OrigArgIndices, Outs);
499 OutgoingValueHandler RetHandler(MIRBuilder, MF.getRegInfo(), MIB);
500 if (!RetHandler.handle(ArgLocs, ArgInfos)) {
504 unsigned NextStackOffset = CCInfo.getNextStackOffset();
507 NextStackOffset = alignTo(NextStackOffset, StackAlignment);
517 splitToValueTypes(OrigRet, 0, ArgInfos, OrigRetIndices);
520 subTargetRegTypeForCallingConv(F, ArgInfos, OrigRetIndices, Ins);
529 CallReturnHandler Handler(MIRBuilder, MF.getRegInfo(), MIB);
530 if (!Handler.handle(ArgLocs, ArgInfos))
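// lowerCall(): byval and sret arguments are rejected as unsupported (lines
// 441-444), the call sequence is opened with ADJCALLSTACKDOWN, outgoing
// arguments are pushed through an OutgoingValueHandler, the callee-visible
// stack size is aligned (line 507), and the call's results are read back
// through a CallReturnHandler attached to the call instruction; the matching
// ADJCALLSTACKUP is presumably emitted once that stack size is known.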
539 template <typename T>
540 void MipsCallLowering::subTargetRegTypeForCallingConv(
547 for (auto &Arg : Args) {
555 for (unsigned i = 0; i < NumRegs; ++i) {
563 ISDArgs.emplace_back(Flags, RegisterVT, VT, true, OrigArgIndices[ArgNo],
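// subTargetRegTypeForCallingConv() converts the generic ArgInfos into
// ISD::InputArg/OutputArg entries: for every argument it asks the target how
// many registers of which type the value occupies under this calling
// convention, then emits one flagged entry per register, tagged with the
// index of the original IR argument.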
570 void MipsCallLowering::splitToValueTypes(
571 const ArgInfo &OrigArg, unsigned OriginalIndex,
578 SplitArgsOrigIndices.push_back(OriginalIndex);
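// splitToValueTypes() records, for every piece an IR-level argument is split
// into, both the per-piece ArgInfo and the index of the original argument it
// came from (SplitArgsOrigIndices), so later stages can map each location
// back to its source argument.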