void StackmapSpecial::forEachArgImpl(
    unsigned numIgnoredB3Args, unsigned numIgnoredAirArgs,
    Inst& inst, Arg::Role role, const ScopedLambda<Inst::EachArgCallback>& callback)
{
    StackmapValue* value = inst.origin->as<StackmapValue>();
    ASSERT(value);

    // Check that insane things have not happened.
    ASSERT(inst.args.size() >= numIgnoredAirArgs);
    ASSERT(value->children().size() >= numIgnoredB3Args);
    ASSERT(inst.args.size() - numIgnoredAirArgs == value->children().size() - numIgnoredB3Args);

    for (unsigned i = 0; i < inst.args.size() - numIgnoredAirArgs; ++i) {
        Arg& arg = inst.args[i + numIgnoredAirArgs];
        ConstrainedValue child = value->constrainedChild(i + numIgnoredB3Args);

        Arg::Role thisRole = role;

        // Cool down the role if the use is cold.
        if (child.rep().kind() == ValueRep::Any && thisRole == Arg::Use)
            thisRole = Arg::ColdUse;

        callback(arg, thisRole, Arg::typeForB3Type(child.value()->type()));
    }
}
void StackmapSpecial::forEachArgImpl(
    unsigned numIgnoredB3Args, unsigned numIgnoredAirArgs,
    Inst& inst, RoleMode roleMode, Optional<unsigned> firstRecoverableIndex,
    const ScopedLambda<Inst::EachArgCallback>& callback)
{
    StackmapValue* value = inst.origin->as<StackmapValue>();
    ASSERT(value);

    // Check that insane things have not happened.
    ASSERT(inst.args.size() >= numIgnoredAirArgs);
    ASSERT(value->children().size() >= numIgnoredB3Args);
    ASSERT(inst.args.size() - numIgnoredAirArgs >= value->children().size() - numIgnoredB3Args);

    for (unsigned i = 0; i < value->children().size() - numIgnoredB3Args; ++i) {
        Arg& arg = inst.args[i + numIgnoredAirArgs];
        ConstrainedValue child = value->constrainedChild(i + numIgnoredB3Args);

        Arg::Role role;
        switch (roleMode) {
        case ForceLateUseUnlessRecoverable:
            ASSERT(firstRecoverableIndex);
            if (arg != inst.args[*firstRecoverableIndex]
                && arg != inst.args[*firstRecoverableIndex + 1]) {
                role = Arg::LateColdUse;
                break;
            }
            FALLTHROUGH;
        case SameAsRep:
            switch (child.rep().kind()) {
            case ValueRep::WarmAny:
            case ValueRep::SomeRegister:
            case ValueRep::Register:
            case ValueRep::Stack:
            case ValueRep::StackArgument:
            case ValueRep::Constant:
                role = Arg::Use;
                break;
            case ValueRep::LateRegister:
                role = Arg::LateUse;
                break;
            case ValueRep::ColdAny:
                role = Arg::ColdUse;
                break;
            case ValueRep::LateColdAny:
                role = Arg::LateColdUse;
                break;
            default:
                RELEASE_ASSERT_NOT_REACHED();
                break;
            }
            break;
        case ForceLateUse:
            role = Arg::LateColdUse;
            break;
        }

        Type type = child.value()->type();
        callback(arg, role, Arg::typeForB3Type(type), Arg::widthForB3Type(type));
    }
}
void StackmapSpecial::forEachArgImpl(
    unsigned numIgnoredB3Args, unsigned numIgnoredAirArgs,
    Inst& inst, RoleMode roleMode, const ScopedLambda<Inst::EachArgCallback>& callback)
{
    StackmapValue* value = inst.origin->as<StackmapValue>();
    ASSERT(value);

    // Check that insane things have not happened.
    ASSERT(inst.args.size() >= numIgnoredAirArgs);
    ASSERT(value->children().size() >= numIgnoredB3Args);
    ASSERT(inst.args.size() - numIgnoredAirArgs == value->children().size() - numIgnoredB3Args);

    for (unsigned i = 0; i < inst.args.size() - numIgnoredAirArgs; ++i) {
        Arg& arg = inst.args[i + numIgnoredAirArgs];
        ConstrainedValue child = value->constrainedChild(i + numIgnoredB3Args);

        Arg::Role role;
        switch (roleMode) {
        case SameAsRep:
            switch (child.rep().kind()) {
            case ValueRep::WarmAny:
            case ValueRep::SomeRegister:
            case ValueRep::Register:
            case ValueRep::Stack:
            case ValueRep::StackArgument:
            case ValueRep::Constant:
                role = Arg::Use;
                break;
            case ValueRep::ColdAny:
                role = Arg::ColdUse;
                break;
            case ValueRep::LateColdAny:
                role = Arg::LateUse;
                break;
            }
            break;
        case ForceLateUse:
            role = Arg::LateUse;
            break;
        }

        callback(arg, role, Arg::typeForB3Type(child.value()->type()));
    }
}
bool StackmapSpecial::isValidImpl(
    unsigned numIgnoredB3Args, unsigned numIgnoredAirArgs,
    Inst& inst)
{
    StackmapValue* value = inst.origin->as<StackmapValue>();
    ASSERT(value);

    // Check that insane things have not happened.
    ASSERT(inst.args.size() >= numIgnoredAirArgs);
    ASSERT(value->children().size() >= numIgnoredB3Args);

    // For the Inst to be valid, it needs to have the right number of arguments.
    if (inst.args.size() - numIgnoredAirArgs < value->children().size() - numIgnoredB3Args)
        return false;

    // Regardless of constraints, stackmaps have some basic requirements for their arguments. For
    // example, you can't have a non-FP-offset address. This verifies those conditions as well as
    // the argument types.
    for (unsigned i = 0; i < value->children().size() - numIgnoredB3Args; ++i) {
        Value* child = value->child(i + numIgnoredB3Args);
        Arg& arg = inst.args[i + numIgnoredAirArgs];

        if (!isArgValidForValue(arg, child))
            return false;
    }

    // The number of constraints has to be no greater than the number of B3 children.
    ASSERT(value->m_reps.size() <= value->children().size());

    // Verify any explicitly supplied constraints.
    for (unsigned i = numIgnoredB3Args; i < value->m_reps.size(); ++i) {
        ValueRep& rep = value->m_reps[i];
        Arg& arg = inst.args[i - numIgnoredB3Args + numIgnoredAirArgs];

        if (!isArgValidForRep(code(), arg, rep))
            return false;
    }

    return true;
}
bool StackmapSpecial::isValidImpl(
    unsigned numIgnoredB3Args, unsigned numIgnoredAirArgs,
    Inst& inst)
{
    StackmapValue* value = inst.origin->as<StackmapValue>();
    ASSERT(value);

    // Check that insane things have not happened.
    ASSERT(inst.args.size() >= numIgnoredAirArgs);
    ASSERT(value->children().size() >= numIgnoredB3Args);

    // For the Inst to be valid, it needs to have the right number of arguments.
    if (inst.args.size() - numIgnoredAirArgs != value->children().size() - numIgnoredB3Args)
        return false;

    // Regardless of constraints, stackmaps have some basic requirements for their arguments. For
    // example, you can't have a non-FP-offset address. This verifies those conditions as well as
    // the argument types.
    for (unsigned i = 0; i < inst.args.size() - numIgnoredAirArgs; ++i) {
        Value* child = value->child(i + numIgnoredB3Args);
        Arg& arg = inst.args[i + numIgnoredAirArgs];

        switch (arg.kind()) {
        case Arg::Tmp:
        case Arg::Imm:
        case Arg::Imm64:
        case Arg::Stack:
        case Arg::CallArg:
            break; // OK
        case Arg::Addr:
            if (arg.base() != Tmp(GPRInfo::callFrameRegister)
                && arg.base() != Tmp(MacroAssembler::stackPointerRegister))
                return false;
            break;
        default:
            return false;
        }

        Arg::Type type = Arg::typeForB3Type(child->type());

        if (!arg.isType(type))
            return false;
    }

    // The number of constraints has to be no greater than the number of B3 children.
    ASSERT(value->m_reps.size() <= value->children().size());

    // Verify any explicitly supplied constraints.
    for (unsigned i = numIgnoredB3Args; i < value->m_reps.size(); ++i) {
        ValueRep& rep = value->m_reps[i];
        Arg& arg = inst.args[i - numIgnoredB3Args + numIgnoredAirArgs];

        switch (rep.kind()) {
        case ValueRep::Any:
            // We already verified this above.
            break;
        case ValueRep::SomeRegister:
            if (!arg.isTmp())
                return false;
            break;
        case ValueRep::Register:
            if (arg != Tmp(rep.reg()))
                return false;
            break;
        case ValueRep::Stack:
            // This is not a valid input representation.
            ASSERT_NOT_REACHED();
            break;
        case ValueRep::StackArgument:
            if (arg == Arg::callArg(rep.offsetFromSP()))
                break;
            if (arg.isAddr() && code().frameSize()) {
                if (arg.base() == Tmp(GPRInfo::callFrameRegister)
                    && arg.offset() == rep.offsetFromSP() - code().frameSize())
                    break;
                if (arg.base() == Tmp(MacroAssembler::stackPointerRegister)
                    && arg.offset() == rep.offsetFromSP())
                    break;
            }
            return false;
        case ValueRep::Constant:
            // This is not a valid input representation.
            ASSERT_NOT_REACHED();
            break;
        }
    }

    return true;
}