/* * Check if the current predicted type for the location in ii is specific * enough for what the current opcode wants. If not, return false. */ bool RegionFormer::consumeInput(int i, const InputInfo& ii) { if (ii.dontGuard) return true; auto const type = irgen::predictedTypeFromLocation(m_irgs, ii.loc); if (m_profiling && type <= TBoxedCell && (m_region->blocks().size() > 1 || !m_region->entry()->empty())) { // We don't want side exits when profiling, so only allow instructions that // consume refs at the beginning of the region. return false; } if (!ii.dontBreak && !type.isKnownDataType()) { // Trying to consume a value without a precise enough type. FTRACE(1, "selectTracelet: {} tried to consume {}\n", m_inst.toString(), m_inst.inputs[i].pretty()); return false; } if (!(type <= TBoxedCell) || m_inst.ignoreInnerType || ii.dontGuardInner) { return true; } if (!type.inner().isKnownDataType()) { // Trying to consume a boxed value without a guess for the inner type. FTRACE(1, "selectTracelet: {} tried to consume ref {}\n", m_inst.toString(), m_inst.inputs[i].pretty()); return false; } return true; }
/*
 * Relax this type to one we can guard on directly.
 *
 * A known DataType is already guardable; otherwise walk from the most
 * specific guardable union down to the least specific, returning the
 * first one that contains this type.
 */
Type Type::relaxToGuardable() const {
  auto const dropped = unspecialize();

  if (dropped.isKnownDataType())        return dropped;
  if (dropped.subtypeOf(UncountedInit)) return Type::UncountedInit;
  if (dropped.subtypeOf(Uncounted))     return Type::Uncounted;
  if (dropped.subtypeOf(Cell))          return Type::Cell;
  if (dropped.subtypeOf(BoxedCell))     return Type::BoxedCell;
  if (dropped.subtypeOf(Gen))           return Type::Gen;
  not_reached();
}
// Translate one of the four relational comparison opcodes (Lt/Lte/Gt/Gte)
// into HHIR, punting to the interpreter for operand types we don't handle.
void IRTranslator::translateLtGtOp(const NormalizedInstruction& i) {
  auto const op = i.op();
  assert(op == Op::Lt || op == Op::Lte || op == Op::Gt || op == Op::Gte);

  // Peek at the top two stack cells with a generic constraint — we only
  // need enough information here to decide whether to punt.
  auto leftType = m_hhbcTrans.topType(1, DataTypeGeneric);
  auto rightType = m_hhbcTrans.topType(0, DataTypeGeneric);
  if (!leftType.isKnownDataType() || !rightType.isKnownDataType()) {
    HHIR_UNIMPLEMENTED(LtGtOp-UnknownInput);
  }

  // Only emit HHIR when both operands are primitive (null/bool/int/double);
  // anything else falls back via HHIR_UNIMPLEMENTED_WHEN.
  bool ok =
    leftType.subtypeOfAny (Type::Null, Type::Bool, Type::Int, Type::Dbl) &&
    rightType.subtypeOfAny(Type::Null, Type::Bool, Type::Int, Type::Dbl);
  HHIR_UNIMPLEMENTED_WHEN(!ok, LtGtOp);

  // NOTE(review): no break statements — HHIR_EMIT presumably expands to
  // code that leaves the function; confirm against the macro definition.
  switch (op) {
    case Op::Lt  : HHIR_EMIT(Lt);
    case Op::Lte : HHIR_EMIT(Lte);
    case Op::Gt  : HHIR_EMIT(Gt);
    case Op::Gte : HHIR_EMIT(Gte);
    default      : HHIR_UNIMPLEMENTED(LtGtOp);
  }
}
/*
 * Pre-optimize a StLoc: when the store cannot change the local's type
 * tag, downgrade the instruction to StLocNT (no-type-store variant).
 */
SSATmp* TraceBuilder::preOptimizeStLoc(IRInstruction* inst) {
  auto const knownType  = getLocalType(inst->getExtra<StLoc>()->locId);
  auto const storedType = inst->getSrc(1)->type();
  assert(inst->getTypeParam().equals(Type::None));

  // There's no need to store the type if it's going to be the same
  // KindOfFoo. Strings are the exception: we don't guard on
  // KindOfStaticString vs. KindOfString, so their tag must be written.
  auto const refOverRef = knownType.isBoxed() && storedType.isBoxed();
  auto const sameKind =
    knownType != Type::None &&      // TODO(#2135185)
    knownType.isKnownDataType() &&
    knownType.equals(storedType) &&
    !knownType.isString();

  if (refOverRef || sameKind) {
    inst->setOpcode(StLocNT);
  }
  return nullptr;
}
// Convert this Type to its corresponding runtime DataType tag.
// Precondition: not a pointer type, and the DataType is known.
DataType Type::toDataType() const {
  assert(!isPtr());
  assert(isKnownDataType());

  // Order is important here: types must progress from more specific
  // to less specific to return the most specific DataType.
  if (subtypeOf(Uninit))      return KindOfUninit;
  if (subtypeOf(InitNull))    return KindOfNull;
  if (subtypeOf(Bool))        return KindOfBoolean;
  if (subtypeOf(Int))         return KindOfInt64;
  if (subtypeOf(Dbl))         return KindOfDouble;
  if (subtypeOf(StaticStr))   return KindOfStaticString;
  if (subtypeOf(Str))         return KindOfString;
  if (subtypeOf(Arr))         return KindOfArray;
  if (subtypeOf(Obj))         return KindOfObject;
  if (subtypeOf(Res))         return KindOfResource;
  if (subtypeOf(BoxedCell))   return KindOfRef;
  if (subtypeOf(Cls))         return KindOfClass;
  always_assert_flog(false, "Bad Type {} in Type::toDataType()", *this);
}
// Convert this Type to its corresponding runtime DataType tag.
// Precondition: not a pointer type (unless Bottom), and the DataType is known.
DataType Type::toDataType() const {
  assertx(!maybe(TPtrToGen) || m_bits == kBottom);
  assertx(isKnownDataType());

  // Order is important here: types must progress from more specific
  // to less specific to return the most specific DataType.
  if (*this <= TUninit)      return KindOfUninit;
  if (*this <= TInitNull)    return KindOfNull;
  if (*this <= TBool)        return KindOfBoolean;
  if (*this <= TInt)         return KindOfInt64;
  if (*this <= TDbl)         return KindOfDouble;
  if (*this <= TStaticStr)   return KindOfStaticString;
  if (*this <= TStr)         return KindOfString;
  if (*this <= TArr)         return KindOfArray;
  if (*this <= TObj)         return KindOfObject;
  if (*this <= TRes)         return KindOfResource;
  if (*this <= TBoxedCell)   return KindOfRef;
  if (*this <= TCls)         return KindOfClass;
  always_assert_flog(false, "Bad Type {} in Type::toDataType()", *this);
}
/*
 * Compute the type of the value produced by loading through a ref whose
 * (unboxed) inner type is typeParam.
 */
Type ldRefReturn(Type typeParam) {
  assert(typeParam.notBoxed());

  // Guarding on specialized types and uncommon unions like {Int|Bool} is
  // expensive enough that we only want to do it in situations where we've
  // manually confirmed the benefit.
  auto const keepSpecialized =
    typeParam.strictSubtypeOf(Type::Obj) &&
    (typeParam.getClass()->attrs() & AttrFinal) &&
    typeParam.getClass()->isCollectionClass();
  if (keepSpecialized) {
    // This case is needed for the minstr-translator; see
    // MInstrTranslator::checkMIState().
    return typeParam;
  }

  // Otherwise coarsen to the widest guardable type that still contains
  // the parameter.
  auto const coarse = typeParam.unspecialize();
  if (coarse.isKnownDataType())      return coarse;
  if (coarse <= Type::UncountedInit) return Type::UncountedInit;
  if (coarse <= Type::Uncounted)     return Type::Uncounted;
  always_assert(coarse <= Type::Cell);
  return Type::InitCell;
}