bool DetectDeadLanes::isUndefInput(const MachineOperand &MO, bool *CrossCopy) const { if (!MO.isUse()) return false; const MachineInstr &MI = *MO.getParent(); if (!lowersToCopies(MI)) return false; const MachineOperand &Def = MI.getOperand(0); unsigned DefReg = Def.getReg(); if (!TargetRegisterInfo::isVirtualRegister(DefReg)) return false; unsigned DefRegIdx = TargetRegisterInfo::virtReg2Index(DefReg); if (!DefinedByCopy.test(DefRegIdx)) return false; const VRegInfo &DefRegInfo = VRegInfos[DefRegIdx]; LaneBitmask UsedLanes = transferUsedLanes(MI, DefRegInfo.UsedLanes, MO); if (UsedLanes != 0) return false; unsigned MOReg = MO.getReg(); if (TargetRegisterInfo::isVirtualRegister(MOReg)) { const TargetRegisterClass *DstRC = MRI->getRegClass(DefReg); *CrossCopy = isCrossCopy(*MRI, MI, DstRC, MO); } return true; }
// Helper function for getting a MachineOperand's register number and adding it // to RegDefs or RegUses. static void insertDefUse(const MachineOperand &MO, SmallSet<unsigned, 32> &RegDefs, SmallSet<unsigned, 32> &RegUses, unsigned ExcludedReg = 0) { unsigned Reg; if (!MO.isReg() || !(Reg = MO.getReg()) || (Reg == ExcludedReg)) return; if (MO.isDef()) RegDefs.insert(Reg); else if (MO.isUse()) RegUses.insert(Reg); }
/// Compute the lane mask actually read by the virtual-register use \p MO.
static LaneBitmask getUsedRegMask(const MachineOperand &MO,
                                  const MachineRegisterInfo &MRI,
                                  const LiveIntervals &LIS) {
  assert(MO.isUse() && MO.isReg() &&
         TargetRegisterInfo::isVirtualRegister(MO.getReg()));

  // An explicit subregister use reads exactly that subregister's lanes.
  unsigned SubRegIdx = MO.getSubReg();
  if (SubRegIdx)
    return MRI.getTargetRegisterInfo()->getSubRegIndexLaneMask(SubRegIdx);

  LaneBitmask MaxMask = MRI.getMaxLaneMaskForVReg(MO.getReg());
  if (MaxMask == LaneBitmask::getLane(0)) // cannot have subregs
    return MaxMask;

  // For a tentative schedule LIS isn't updated yet but livemask should remain
  // the same on any schedule. Subreg defs can be reordered but they all must
  // dominate uses anyway.
  auto Idx = LIS.getInstructionIndex(*MO.getParent()).getBaseIndex();
  return getLiveLaneMask(MO.getReg(), Idx, LIS, MRI);
}
unsigned llvm::constrainOperandRegClass( const MachineFunction &MF, const TargetRegisterInfo &TRI, MachineRegisterInfo &MRI, const TargetInstrInfo &TII, const RegisterBankInfo &RBI, MachineInstr &InsertPt, const MCInstrDesc &II, const MachineOperand &RegMO, unsigned OpIdx) { unsigned Reg = RegMO.getReg(); // Assume physical registers are properly constrained. assert(TargetRegisterInfo::isVirtualRegister(Reg) && "PhysReg not implemented"); const TargetRegisterClass *RegClass = TII.getRegClass(II, OpIdx, &TRI, MF); // Some of the target independent instructions, like COPY, may not impose any // register class constraints on some of their operands: If it's a use, we can // skip constraining as the instruction defining the register would constrain // it. // We can't constrain unallocatable register classes, because we can't create // virtual registers for these classes, so we need to let targets handled this // case. if (RegClass && !RegClass->isAllocatable()) RegClass = TRI.getConstrainedRegClassForOperand(RegMO, MRI); if (!RegClass) { assert((!isTargetSpecificOpcode(II.getOpcode()) || RegMO.isUse()) && "Register class constraint is required unless either the " "instruction is target independent or the operand is a use"); // FIXME: Just bailing out like this here could be not enough, unless we // expect the users of this function to do the right thing for PHIs and // COPY: // v1 = COPY v0 // v2 = COPY v1 // v1 here may end up not being constrained at all. Please notice that to // reproduce the issue we likely need a destination pattern of a selection // rule producing such extra copies, not just an input GMIR with them as // every existing target using selectImpl handles copies before calling it // and they never reach this function. return Reg; } return constrainRegToClass(MRI, TII, RBI, InsertPt, Reg, *RegClass); }