SSATmp* IRBuilder::preOptimizeCheckStk(IRInstruction* inst) {
  auto const newType = inst->typeParam();
  auto sp = inst->src(0);
  auto offset = inst->extra<CheckStk>()->offset;

  auto stkVal = getStackValue(sp, offset);
  auto const oldType = stkVal.knownType;

  if (oldType.isBoxed() && newType.isBoxed() &&
      (oldType.not(newType) || newType < oldType)) {
    /* This CheckStk serves to update the inner type hint for a boxed
     * value, which requires no runtime work. This depends on the type being
     * boxed, and constraining it with DataTypeCountness will do it. */
    constrainStack(sp, offset, DataTypeCountness);
    return gen(AssertStk, newType, StackOffset(offset), sp);
  }

  if (newType.not(oldType)) {
    /* This check will always fail. It's probably due to an incorrect
     * prediction. Generate a Jmp, and return the source because
     * following instructions may depend on the output of CheckStk
     * (they'll be DCEd later). Note that we can't use convertToJmp
     * because the return value isn't nullptr, so the original
     * instruction won't be inserted into the stream. */
    gen(Jmp, inst->taken());
    return sp;
  }

  if (newType >= oldType) {
    // The new type isn't better than the old type.
    return sp;
  }

  return nullptr;
}
SSATmp* TraceBuilder::preOptimizeStLoc(IRInstruction* inst) {
  auto locId = inst->extra<StLoc>()->locId;
  auto const curType = localType(locId, DataTypeGeneric);
  auto const newType = inst->src(1)->type();

  assert(inst->typeParam().equals(Type::None));

  // There's no need to store the type if it's going to be the same
  // KindOfFoo. We still have to store string types because we don't
  // guard on KindOfStaticString vs. KindOfString.
  auto const bothBoxed = curType.isBoxed() && newType.isBoxed();
  auto const sameUnboxed = curType != Type::None && // TODO(#2135185)
    curType.isSameKindOf(newType) &&
    !curType.isString();
  if (bothBoxed || sameUnboxed) {
    // TODO(t2598894) once relaxGuards supports proper type reflowing, we
    // should be able to relax the constraint here and degrade StLocNT to
    // StLoc if we relax its input.
    if (sameUnboxed) constrainLocal(locId, DataTypeSpecific, "StLoc -> StLocNT");
    inst->setOpcode(StLocNT);
  }

  return nullptr;
}
SSATmp* IRBuilder::preOptimizeCheckType(IRInstruction* inst) {
  SSATmp* src = inst->src(0);
  auto const oldType = src->type();
  auto const newType = inst->typeParam();

  if (oldType.isBoxed() && newType.isBoxed() &&
      (oldType.not(newType) || newType < oldType)) {
    /* This CheckType serves to update the inner type hint for a boxed value,
     * which requires no runtime work. This depends on the type being boxed,
     * and constraining it with DataTypeCountness will do it. */
    constrainValue(src, DataTypeCountness);
    return gen(AssertType, newType, src);
  }

  if (oldType.not(newType)) {
    /* This check will always fail. It's probably due to an incorrect
     * prediction. Generate a Jmp, and return src because
     * following instructions may depend on the output of CheckType
     * (they'll be DCEd later). Note that we can't use convertToJmp
     * because the return value isn't nullptr, so the original
     * instruction won't be inserted into the stream. */
    gen(Jmp, inst->taken());
    return src;
  }

  if (newType >= oldType) {
    /* The type of the src is the same or more refined than type, so the guard
     * is unnecessary. */
    return src;
  }

  return nullptr;
}
RuntimeType Type::toRuntimeType() const {
  assert(!isPtr());
  auto const outer = isBoxed() ? KindOfRef : toDataType();
  auto const inner = isBoxed() ? innerType().toDataType() : KindOfNone;
  auto rtt = RuntimeType{outer, inner};

  if (isSpecialized()) {
    if (subtypeOf(Type::Arr)) {
      return rtt.setArrayKind(getArrayKind());
    } else if (subtypeOf(Type::Obj)) {
      return rtt.setKnownClass(getClass());
    }
  }

  return rtt;
}
SSATmp* TraceBuilder::preOptimizeCheckLoc(IRInstruction* inst) {
  auto const locId = inst->extra<CheckLoc>()->locId;
  Type typeParam = inst->typeParam();

  if (auto const prevValue = localValue(locId, DataTypeGeneric)) {
    return gen(CheckType, typeParam, inst->taken(), prevValue);
  }

  auto const prevType = localType(locId, DataTypeSpecific);

  if (prevType <= typeParam) {
    return inst->src(0);
  } else {
    //
    // Normally, it doesn't make sense to be checking something that's
    // deemed to fail. Incompatible boxed types are ok though, since
    // we don't track them precisely, but instead check them at every
    // use.
    //
    // However, in JitPGO mode right now, this pathological case can
    // happen, because profile counters are not accurate and we
    // currently don't analyze Block post-conditions when picking its
    // successors during region selection. This can lead to
    // incompatible types in blocks selected for the same region.
    //
    if (!typeParam.isBoxed() || !prevType.isBoxed()) {
      if ((typeParam & prevType) == Type::Bottom) {
        assert(RuntimeOption::EvalJitPGO);
        return gen(Jmp, inst->taken());
      }
    }
  }

  return nullptr;
}
SSATmp* IRBuilder::preOptimizeDecRefLoc(IRInstruction* inst) {
  auto const locId = inst->extra<DecRefLoc>()->locId;

  /*
   * Refine the type if we can.
   *
   * We can't really rely on the types held in the boxed values since aliasing
   * stores may change them, and we only guard during LdRef. So we have to
   * change any boxed type to BoxedCell.
   *
   * DataTypeGeneric is used because we don't want a DecRef to be the only
   * thing keeping a guard around. This code is designed to tolerate the
   * incoming type being relaxed.
   */
  auto knownType = localType(locId, DataTypeGeneric);
  if (knownType.isBoxed()) {
    knownType = Type::BoxedCell;
  }

  /*
   * If we have the local value in flight, use a DecRef on it instead of doing
   * it in memory.
   */
  if (auto tmp = localValue(locId, DataTypeGeneric)) {
    gen(DecRef, tmp);
    inst->convertToNop();
    return nullptr;
  }

  if (!typeMightRelax()) {
    inst->setTypeParam(std::min(knownType, inst->typeParam()));
  }

  return nullptr;
}
SSATmp* IRBuilder::preOptimizeCheckLoc(IRInstruction* inst) {
  auto const locId = inst->extra<CheckLoc>()->locId;
  Type typeParam = inst->typeParam();
  SSATmp* src = inst->src(0);

  if (auto const prevValue = localValue(locId, DataTypeGeneric)) {
    return gen(CheckType, typeParam, inst->taken(), prevValue);
  }

  auto const prevType = localType(locId, DataTypeGeneric);

  if (prevType <= typeParam) {
    return src;
  }

  if (prevType.not(typeParam)) {
    if (typeParam.isBoxed() && prevType.isBoxed()) {
      /* When both types are non-intersecting boxed types, we're just
       * updating the inner type hint. This requires no runtime work. */
      constrainLocal(locId, DataTypeCountness, "preOptimizeCheckLoc");
      return gen(AssertLoc, LocalId(locId), typeParam, src);
    }
    /* This check will always fail. It's probably due to an incorrect
     * prediction. Generate a Jmp, and return the source because
     * following instructions may depend on the output of CheckLoc
     * (they'll be DCEd later). Note that we can't use convertToJmp
     * because the return value isn't nullptr, so the original
     * instruction won't be inserted into the stream. */
    gen(Jmp, inst->taken());
    return src;
  }

  return nullptr;
}
SSATmp* TraceBuilder::preOptimizeDecRefLoc(IRInstruction* inst) {
  auto const locId = inst->extra<DecRefLoc>()->locId;

  /*
   * Refine the type if we can.
   *
   * We can't really rely on the types held in the boxed values since
   * aliasing stores may change them, and we only guard during LdRef.
   * So we have to change any boxed type to BoxedCell.
   */
  auto knownType = localType(locId, DataTypeCountness);
  if (knownType.isBoxed()) {
    knownType = Type::BoxedCell;
  }
  if (knownType != Type::None) { // TODO(#2135185)
    inst->setTypeParam(
      Type::mostRefined(knownType, inst->typeParam())
    );
  }

  /*
   * If we have the local value in flight, use a DecRef on it instead
   * of doing it in memory.
   */
  if (auto tmp = localValue(locId, DataTypeCountness)) {
    gen(DecRef, tmp);
    inst->convertToNop();
  }

  return nullptr;
}
void MInstrEffects::get(const IRInstruction* inst,
                        const FrameStateMgr& frame,
                        LocalStateHook& hook) {
  // If the base for this instruction is a local address, the helper call might
  // have side effects on the local's value.
  auto const base = inst->src(minstrBaseIdx(inst->op()));
  auto const locInstr = base->inst();

  // Right now we require that the address of any affected local is the
  // immediate source of the base tmp. This isn't actually specified in the ir
  // spec right now, but we intend to make it more general soon.
  if (locInstr->op() != LdLocAddr) return;

  auto const locId = locInstr->extra<LdLocAddr>()->locId;
  auto const baseType = frame.localType(locId);

  MInstrEffects effects(inst->op(), baseType.ptr(Ptr::Frame));
  if (effects.baseTypeChanged || effects.baseValChanged) {
    auto const ty = effects.baseType.derefIfPtr();
    if (ty.isBoxed()) {
      hook.setLocalType(locId, Type::BoxedInitCell);
      hook.setBoxedLocalPrediction(locId, ty);
    } else {
      hook.setLocalType(locId, ty);
    }
  }
}
SSATmp* TraceBuilder::preOptimizeStLoc(IRInstruction* inst) {
  auto const curType = getLocalType(inst->getExtra<StLoc>()->locId);
  auto const newType = inst->getSrc(1)->type();

  assert(inst->getTypeParam().equals(Type::None));

  // There's no need to store the type if it's going to be the same
  // KindOfFoo. We still have to store string types because we don't
  // guard on KindOfStaticString vs. KindOfString.
  auto const bothBoxed = curType.isBoxed() && newType.isBoxed();
  auto const sameUnboxed = curType != Type::None && // TODO(#2135185)
    curType.isKnownDataType() &&
    curType.equals(newType) &&
    !curType.isString();
  if (bothBoxed || sameUnboxed) {
    inst->setOpcode(StLocNT);
  }

  return nullptr;
}
SSATmp* IRBuilder::preOptimizeStLoc(IRInstruction* inst) {
  // Guard relaxation might change the current local type, so don't try to
  // change to StLocNT until after relaxation happens.
  if (typeMightRelax()) return nullptr;

  auto locId = inst->extra<StLoc>()->locId;
  auto const curType = localType(locId, DataTypeGeneric);
  auto const newType = inst->src(1)->type();

  assert(!inst->hasTypeParam());

  /*
   * There's no need to store the type if it's going to be the same
   * KindOfFoo. We'll still have to store string types because we
   * aren't specific about storing KindOfStaticString
   * vs. KindOfString, and a Type::Null might mean KindOfUninit or
   * KindOfNull.
   */
  auto const bothBoxed = curType.isBoxed() && newType.isBoxed();
  auto const sameUnboxed = [&] {
    auto avoidable = { Type::Uninit,
                       Type::InitNull,
                       Type::Bool,
                       Type::Int,
                       Type::Dbl,
                       // No strings.
                       Type::Arr,
                       Type::Obj,
                       Type::Res };
    for (auto t : avoidable) {
      if (curType <= t && newType <= t) return true;
    }
    return false;
  };

  if (bothBoxed || sameUnboxed()) {
    inst->setOpcode(StLocNT);
  }

  return nullptr;
}
SSATmp* TraceBuilder::preOptimizeStLoc(IRInstruction* inst) {
  // Guard relaxation might change the current local type, so don't try to
  // change to StLocNT until after relaxation happens.
  if (!inReoptimize()) return nullptr;

  auto locId = inst->extra<StLoc>()->locId;
  auto const curType = localType(locId, DataTypeGeneric);
  auto const newType = inst->src(1)->type();

  assert(inst->typeParam() == Type::None);

  // There's no need to store the type if it's going to be the same
  // KindOfFoo. We still have to store string types because we don't
  // guard on KindOfStaticString vs. KindOfString.
  auto const bothBoxed = curType.isBoxed() && newType.isBoxed();
  auto const sameUnboxed = curType.isSameKindOf(newType) &&
    !curType.isString();
  if (bothBoxed || sameUnboxed) {
    inst->setOpcode(StLocNT);
  }

  return nullptr;
}
void emitVGetL(HTS& env, int32_t id) {
  auto value = ldLoc(env, id, makeExit(env), DataTypeCountnessInit);
  auto const t = value->type();
  always_assert(t.isBoxed() || t.notBoxed());

  if (t.notBoxed()) {
    if (value->isA(Type::Uninit)) {
      value = cns(env, Type::InitNull);
    }
    value = gen(env, Box, value);
    stLocRaw(env, id, fp(env), value);
  }
  pushIncRef(env, value);
}
DataType Type::toDataType() const {
  assert(!isPtr());
  if (isBoxed()) {
    return KindOfRef;
  }

  // Order is important here: types must progress from more specific
  // to less specific to return the most specific DataType.
  if (subtypeOf(Uninit))        return KindOfUninit;
  if (subtypeOf(Null))          return KindOfNull;
  if (subtypeOf(Bool))          return KindOfBoolean;
  if (subtypeOf(Int))           return KindOfInt64;
  if (subtypeOf(Dbl))           return KindOfDouble;
  if (subtypeOf(StaticStr))     return KindOfStaticString;
  if (subtypeOf(Str))           return KindOfString;
  if (subtypeOf(Arr))           return KindOfArray;
  if (subtypeOf(Obj))           return KindOfObject;
  if (subtypeOf(Res))           return KindOfResource;
  if (subtypeOf(Cls))           return KindOfClass;
  if (subtypeOf(UncountedInit)) return KindOfUncountedInit;
  if (subtypeOf(Uncounted))     return KindOfUncounted;
  if (subtypeOf(Gen))           return KindOfAny;
  not_reached();
}
SSATmp* IRBuilder::preOptimizeAssertTypeOp(IRInstruction* inst,
                                           Type oldType,
                                           ConstraintFunc constrain) {
  auto const newType = inst->typeParam();

  if (oldType.not(newType)) {
    // If both types are boxed this is ok and even expected as a means to
    // update the hint for the inner type.
    if (oldType.isBoxed() && newType.isBoxed()) return nullptr;

    // We got external information (probably from static analysis) that
    // conflicts with what we've built up so far. There's no reasonable way to
    // continue here: we can't properly fatal the request because we can't make
    // a catch trace or SpillStack without HhbcTranslator, we can't punt on
    // just this instruction because we might not be in the initial translation
    // phase, and we can't just plow on forward since we'll probably generate
    // malformed IR. Since this case is very rare, just punt on the whole trace
    // so it gets interpreted.
    TRACE_PUNT("Invalid AssertTypeOp");
  }

  // Asserting in these situations doesn't add any information.
  if (oldType <= Type::Cls || newType == Type::Gen) return inst->src(0);

  // We're asserting a strict subtype of the old type, so keep the assert
  // around.
  if (newType < oldType) return nullptr;

  // oldType is at least as good as the new type. Kill this assert op but
  // preserve the type we were asserting in case the source type gets relaxed
  // past it.
  if (newType >= oldType) {
    constrain({DataTypeGeneric, newType});
    return inst->src(0);
  }

  // AssertLoc is special here because it's the one AssertTypeOp that doesn't
  // do its own filtering of the destination type based on the input type and
  // the asserted type. This will hopefully be fixed soon.
  if (inst->is(AssertLoc)) {
    // Now we're left with cases where neither type is a subtype of the other
    // but they have some nonzero intersection. We want to end up asserting the
    // intersection, but we have to constrain the input to avoid reintroducing
    // types that were removed from the original typeParam.
    auto const intersect = newType & oldType;
    inst->setTypeParam(intersect);

    TypeConstraint tc;
    if (intersect != newType) {
      Type relaxed;
      // Find the most general constraint that doesn't modify the type being
      // asserted.
      while ((relaxed = newType & relaxType(oldType, tc)) != intersect) {
        if (tc.category > DataTypeGeneric &&
            relaxed.maybeBoxed() && intersect.maybeBoxed() &&
            (relaxed & Type::Cell) == (intersect & Type::Cell)) {
          // If the inner type is why we failed, constrain that a level.
          incCategory(tc.innerCat);
        } else {
          incCategory(tc.category);
        }
      }
    }
    constrain(tc);
  }

  return nullptr;
}