/*
 * Modify a GeneralEffects to take potential VM re-entry into account.  This
 * affects may-load, may-store, and kills information for the instruction.
 * The GeneralEffects should already contain AHeapAny in both loads and stores
 * if it affects those locations for reasons other than re-entry, but does not
 * need to if it doesn't.
 *
 * For loads, we need to take into account EnableArgsInBacktraces: if this
 * flag is on, any instruction that could re-enter could call debug_backtrace,
 * which could read the argument locals of any activation record in the
 * callstack.  We don't try to limit the load effects to argument locals here,
 * though, and just union in all the locals.
 *
 * For kills, locations on the eval stack below the re-entry depth should all
 * be added.
 *
 * Important note: because of the `kills' set modifications, an instruction
 * may not report that it can re-enter if it actually can't.  The reason this
 * can go wrong is that if the instruction was in an inlined function, if
 * we've removed the DefInlineFP its spOff will not be meaningful (unless it's
 * a DecRef instruction, which we explicitly adjust in dce.cpp).  In this case
 * the `kills' set will refer to the wrong stack locations.  In general this
 * means instructions that can re-enter must have catch traces---but a few
 * other instructions are exceptions, either since they are not allowed in
 * inlined functions or because they take the (possibly-inlined) FramePtr as a
 * source.
 */
GeneralEffects may_reenter(const IRInstruction& inst, GeneralEffects x) {
  // Only instructions with catch traces, or the explicitly whitelisted
  // opcodes below, are allowed to claim may_reenter (see header comment).
  auto const reenterAllowed =
    (inst.taken() && inst.taken()->isCatch()) ||
    inst.is(DecRef,
            ReleaseVVAndSkip,
            CIterFree,
            MIterFree,
            MIterNext,
            MIterNextK,
            IterFree,
            ABCUnblock,
            GenericRetDecRefs);
  always_assert_flog(
    reenterAllowed,
    "instruction {} claimed may_reenter, but it isn't allowed to say that",
    inst
  );

  /*
   * We want to union `killed_stack' into whatever else the instruction
   * already said it must kill, but if we end up with an unrepresentable
   * AliasClass we can't return a set that's too big (the `kills' set is
   * unlike the other AliasClasses in GeneralEffects in that means it kills
   * /everything/ in the set, since it's must-information).
   *
   * If we can't represent the union, just take the stack, in part because we
   * have some debugging asserts about this right now---but also nothing
   * actually uses may_reenter with a non-AEmpty kills at the time of this
   * writing anyway.
   */
  auto const stackKills = stack_below(inst.marker().fp(),
                                      -inst.marker().spOff().offset - 1);
  auto const mergedKills = x.kills.precise_union(stackKills);

  // If debug_backtrace can be reached, it may read any frame's locals.
  auto const backtraceLoads = RuntimeOption::EnableArgsInBacktraces
    ? AFrameAny
    : AEmpty;

  return GeneralEffects {
    x.loads | AHeapAny | backtraceLoads,
    x.stores | AHeapAny,
    x.moves,
    mergedKills ? *mergedKills : stackKills
  };
}
/*
 * Attempt to tighten the type-guard constraint for the stack slot `idx'
 * relative to the stack pointer `sp'.  Returns true iff some guard's
 * constraint was actually changed.  Walks backwards through the
 * instructions that produced the slot's type (AssertStk/CheckStk chains),
 * relaxing `tc' along the way based on what each link already proves.
 */
bool IRBuilder::constrainStack(SSATmp* sp, int32_t idx, TypeConstraint tc) {
  // Guard relaxation may be disabled entirely (e.g. by runtime option);
  // in that case there is nothing to constrain.
  if (!shouldConstrainGuards()) return false;
  // An inner-type constraint only makes sense if the outer category is at
  // least Countness.
  always_assert(IMPLIES(tc.innerCat > DataTypeGeneric,
                        tc.category >= DataTypeCountness));
  ITRACE(1, "constrainStack({}, {}, {})\n", *sp->inst(), idx, tc);
  Indent _i;
  assert(sp->isA(Type::StkPtr));

  // We've hit a LdStack. If getStackValue gives us a value, recurse on
  // that. Otherwise, look at the instruction that gave us the type of the
  // stack element. If it's a GuardStk or CheckStk, it's our target. If it's
  // anything else, the value is new so there's no guard to relax.
  auto stackInfo = getStackValue(sp, idx);

  // Sometimes code in HhbcTranslator asks for a value with DataTypeSpecific
  // but can tolerate a less specific value. If that happens, there's nothing
  // to constrain.
  if (!typeFitsConstraint(stackInfo.knownType, tc)) return false;

  IRInstruction* typeSrc = stackInfo.typeSrc;
  if (stackInfo.value) {
    // A concrete SSATmp is available for the slot; constrain it directly.
    ITRACE(1, "value = {}\n", *stackInfo.value->inst());
    return constrainValue(stackInfo.value, tc);
  } else if (typeSrc->is(AssertStk)) {
    // If the immutable typeParam fits the constraint, we're done.
    auto const typeParam = typeSrc->typeParam();
    if (typeFitsConstraint(typeParam, tc)) return false;

    // Otherwise relax the constraint by what the assert proves and keep
    // walking toward the guard that originally produced the type.
    auto const srcIdx = typeSrc->extra<StackOffset>()->offset;
    auto const srcType = getStackValue(typeSrc->src(0), srcIdx).knownType;
    auto const newTc = relaxConstraint(tc, typeParam, srcType);
    ITRACE(1, "tracing through {}, orig tc: {}, new tc: {}\n",
           *typeSrc, tc, newTc);
    return constrainStack(typeSrc->src(0), srcIdx, newTc);
  } else if (typeSrc->is(CheckStk)) {
    auto changed = false;
    auto const typeParam = typeSrc->typeParam();
    auto const srcIdx = typeSrc->extra<StackOffset>()->offset;
    auto const srcType = getStackValue(typeSrc->src(0), srcIdx).knownType;

    // Constrain the guard on the CheckType, but first relax the constraint
    // based on what's known about srcType.
    auto const guardTc = relaxConstraint(tc, srcType, typeParam);
    changed = constrainGuard(typeSrc, guardTc) || changed;

    // Relax typeParam with its current constraint. This is used below to
    // recursively relax the constraint on the source, if needed.
    auto constraint = m_guardConstraints[typeSrc];
    constraint.category = std::max(constraint.category, guardTc.category);
    constraint.innerCat = std::max(constraint.innerCat, guardTc.innerCat);
    auto const knownType = refineType(relaxType(typeParam, constraint),
                                      constraint.assertedType);

    // If the guard alone (as constrained) still doesn't satisfy `tc',
    // continue constraining through the check's input.
    if (!typeFitsConstraint(knownType, tc)) {
      auto const newTc = relaxConstraint(tc, knownType, srcType);
      ITRACE(1, "tracing through {}, orig tc: {}, new tc: {}\n",
             *typeSrc, tc, newTc);
      changed = constrainStack(typeSrc->src(0), srcIdx, newTc) || changed;
    }
    return changed;
  } else {
    // Any other source produced a fresh value: only a GuardStk has a guard
    // that can be constrained; everything else needs no relaxation.
    ITRACE(1, "typeSrc = {}\n", *typeSrc);
    return typeSrc->is(GuardStk) && constrainGuard(typeSrc, tc);
  }
}