Example #1
// This function attempts to find a pre-coloring hint from two
// different sources: if tmp comes from a DefLabel, it scans the
// SSATmps providing values to the incoming Jmp_s to look for a
// hint. If tmp is consumed by a Jmp_, it looks for other incoming
// Jmp_s to its destination and sees if any of them have already been
// given a register. If both of these fail, let normal register
// allocation proceed unhinted.
RegNumber LinearScan::getJmpPreColor(SSATmp* tmp, uint32_t regIndex,
                                     bool isReload) {
  IRInstruction* srcInst = tmp->inst();
  const JmpList& jmps = m_jmps[tmp];
  if (isReload && (srcInst->op() == DefLabel || !jmps.empty())) {
    // If we're precoloring a Reload of a temp that we'd normally find
    // a hint for, just return the register allocated to the spilled
    // temp.
    auto reg = m_allocInfo[tmp].reg(regIndex);
    assert(reg != reg::noreg);
    return reg;
  }

  if (srcInst->op() == DefLabel) {
    // Figure out which dst of the label is tmp
    for (unsigned i = 0, n = srcInst->numDsts(); i < n; ++i) {
      if (srcInst->dst(i) == tmp) {
        auto reg = findLabelSrcReg(m_allocInfo, srcInst, i, regIndex);
        // Until we handle loops, it's a bug to try to allocate a
        // register to a DefLabel's dest before all of its incoming
        // Jmp_s have had their srcs allocated, unless the incoming
        // block is unreachable.
        const DEBUG_ONLY bool unreachable =
          std::find(m_blocks.begin(), m_blocks.end(),
                    srcInst->block()) == m_blocks.end();
        always_assert(reg != reg::noreg || unreachable);
        return reg;
      }
    }
    not_reached();
  }

  // If srcInst wasn't a label, check if tmp is used by any Jmp_
  // instructions. If it is, trace to the Jmp_'s label and use the
  // same procedure as above.
  for (unsigned ji = 0, jn = jmps.size(); ji < jn; ++ji) {
    IRInstruction* jmp = jmps[ji];
    IRInstruction* label = jmp->taken()->front();

    // Figure out which src of the Jmp_ is tmp
    for (unsigned si = 0, sn = jmp->numSrcs(); si < sn; ++si) {
      SSATmp* src = jmp->src(si);
      if (tmp == src) {
        // For now, a DefLabel should never have a register assigned
        // to it before any of its incoming Jmp_ instructions.
        always_assert(m_allocInfo[label->dst(si)].reg(regIndex) ==
                      reg::noreg);
        auto reg = findLabelSrcReg(m_allocInfo, label, si, regIndex);
        if (reg != reg::noreg) return reg;
      }
    }
  }

  return reg::noreg;
}
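
Both branches above delegate the actual lookup to findLabelSrcReg, which is not shown here. The idea is to scan a DefLabel's incoming values for one that already holds a register and reuse it as the hint. A minimal, self-contained sketch of that idea with stand-in types (Value, Phi, and phiPreColor are hypothetical names, not HHVM's):

// Minimal sketch (toy types, not HHVM's) of the pre-coloring idea above:
// a phi-like join reuses a register that one of its incoming values was
// already assigned, so the join and its inputs can share that register.
#include <vector>

constexpr int kNoReg = -1;

struct Value {
  int reg = kNoReg;              // register assigned so far, or kNoReg
};

struct Phi {
  std::vector<Value*> incoming;  // one value per incoming jump
  Value dst;
};

// Return a pre-coloring hint for the phi's destination: the register of the
// first incoming value that already has one, or kNoReg to let the allocator
// pick freely.
int phiPreColor(const Phi& phi) {
  for (const Value* in : phi.incoming) {
    if (in->reg != kNoReg) return in->reg;
  }
  return kNoReg;
}

int main() {
  Value a{3}, b;                 // 'a' already lives in register 3
  Phi phi{{&a, &b}, {}};
  phi.dst.reg = phiPreColor(phi);
  return phi.dst.reg == 3 ? 0 : 1;
}
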
Example #2
/*
 * Insert asserts at various points in the IR.
 * TODO: t2137231 Insert DbgAssertPtr at points that use or produce a GenPtr
 */
static void insertAsserts(IRTrace* trace, IRFactory& factory) {
  forEachTraceBlock(trace, [&](Block* block) {
    for (auto it = block->begin(), end = block->end(); it != end; ) {
      IRInstruction& inst = *it;
      ++it;
      if (inst.op() == SpillStack) {
        insertSpillStackAsserts(inst, factory);
        continue;
      }
      if (inst.op() == Call) {
        SSATmp* sp = inst.dst();
        IRInstruction* addr = factory.gen(LdStackAddr,
                                          inst.marker(),
                                          Type::PtrToGen,
                                          StackOffset(0),
                                          sp);
        insertAfter(&inst, addr);
        insertAfter(addr, factory.gen(DbgAssertPtr, inst.marker(),
                                      addr->dst()));
        continue;
      }
      if (!inst.isBlockEnd()) insertRefCountAsserts(inst, factory);
    }
  });
}
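
The insertAfter helper used above is not shown. Judging from the iterator pattern in Example #4 (block->iteratorTo followed by block->insert), it plausibly looks like the sketch below; this is an assumption about its shape, not the actual implementation:

// Hedged sketch of insertAfter, assuming the Block API used in Example #4
// (iteratorTo/insert); not necessarily the real implementation.
static void insertAfter(IRInstruction* definer, IRInstruction* inst) {
  assert(!definer->isBlockEnd());
  Block* block = definer->block();
  auto pos = block->iteratorTo(definer);
  ++pos;                         // step past the defining instruction
  block->insert(pos, inst);
}
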
Example #3
/**
 * Called to clear out the tracked local values at a call site.
 * Calls kill all registers, so we don't want to keep locals in
 * registers across calls. We do continue tracking the types in
 * locals, however.
 */
void TraceBuilder::killLocalsForCall() {
  auto doKill = [&](smart::vector<LocalState>& locals) {
    for (auto& loc : locals) {
      SSATmp* t = loc.value;
      // should not kill DefConst, and LdConst should be replaced by DefConst
      if (!t || t->inst()->op() == DefConst) continue;

      if (t->inst()->op() == LdConst) {
        // make the new DefConst instruction
        IRInstruction* clone = t->inst()->clone(&m_irFactory);
        clone->setOpcode(DefConst);
        loc.value = clone->dst();
        continue;
      }
      assert(!t->isConst());
      loc.unsafe = true;
    }
  };

  doKill(m_locals);
  m_callerAvailableValues.clear();

  for (auto& state : m_inlineSavedStates) {
    doKill(state->locals);
    state->callerAvailableValues.clear();
  }
}
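
A small, self-contained model of the same policy may help, using made-up Op and Local types rather than HHVM's: constants are cheap to rematerialize, so a constant-defined local survives the call (an LdConst is retagged as a DefConst), while every other local is marked unsafe because the call clobbers all registers.

#include <cassert>
#include <vector>

enum class Op { DefConst, LdConst, Other };

struct Local {
  Op definingOp = Op::Other;
  bool unsafe = false;        // true: value may no longer live in a register
};

void killLocalsForCallSketch(std::vector<Local>& locals) {
  for (auto& loc : locals) {
    if (loc.definingOp == Op::DefConst) continue;  // already rematerializable
    if (loc.definingOp == Op::LdConst) {           // promote the load to a pure const
      loc.definingOp = Op::DefConst;
      continue;
    }
    loc.unsafe = true;                             // anything else is invalidated
  }
}

int main() {
  std::vector<Local> locals{{Op::LdConst}, {Op::Other}};
  killLocalsForCallSketch(locals);
  assert(locals[0].definingOp == Op::DefConst && !locals[0].unsafe);
  assert(locals[1].unsafe);
  return 0;
}
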
Example #4
/*
 * Insert a DbgAssertPtr instruction for each stack location stored to by
 * a SpillStack instruction.
 */
static void insertSpillStackAsserts(IRInstruction& inst, IRFactory* factory) {
  SSATmp* sp = inst.dst();
  auto const vals = inst.srcs().subpiece(2);
  auto* block = inst.block();
  auto pos = block->iteratorTo(&inst); ++pos;
  for (unsigned i = 0, n = vals.size(); i < n; ++i) {
    Type t = vals[i]->type();
    if (t.subtypeOf(Type::Gen)) {
      IRInstruction* addr = factory->gen(LdStackAddr,
                                         Type::PtrToGen,
                                         StackOffset(i),
                                         sp);
      block->insert(pos, addr);
      IRInstruction* check = factory->gen(DbgAssertPtr, addr->dst());
      block->insert(pos, check);
    }
  }
}
/**
 * Called to clear out the tracked local values at a call site.
 * Calls kill all registers, so we don't want to keep locals in
 * registers across calls. We do continue tracking the types in
 * locals, however.
 */
void TraceBuilder::killLocalsForCall() {
  for (auto& loc : m_locals) {
    SSATmp* t = loc.value;
    // should not kill DefConst, and LdConst should be replaced by DefConst
    if (!t || t->inst()->op() == DefConst) continue;

    if (t->inst()->op() == LdConst) {
      // make the new DefConst instruction
      IRInstruction* clone = t->inst()->clone(&m_irFactory);
      clone->setOpcode(DefConst);
      loc.value = clone->dst();
      continue;
    }
    assert(!t->isConst());
    loc.unsafe = true;
  }
}
Example #6
/**
 * Called to clear out the tracked local values at a call site.
 * Calls kill all registers, so we don't want to keep locals in
 * registers across calls. We do continue tracking the types in
 * locals, however.
 */
void TraceBuilder::killLocals() {
  for (uint32_t i = 0; i < m_localValues.size(); i++) {
    SSATmp* t = m_localValues[i];
    // should not kill DefConst, and LdConst should be replaced by DefConst
    if (!t || t->inst()->op() == DefConst) {
      continue;
    }
    if (t->inst()->op() == LdConst) {
      // make the new DefConst instruction
      IRInstruction* clone = t->inst()->clone(&m_irFactory);
      clone->setOpcode(DefConst);
      m_localValues[i] = clone->dst();
      continue;
    }
    assert(!t->isConst());
    m_localValues[i] = nullptr;
  }
}
Example #7
/*
 * Insert asserts at various points in the IR.
 * TODO: t2137231 Insert DbgAssertPtr at points that use or produce a GenPtr
 */
static void insertAsserts(IRUnit& unit) {
  postorderWalk(unit, [&](Block* block) {
      for (auto it = block->begin(), end = block->end(); it != end; ) {
        IRInstruction& inst = *it;
        ++it;
        if (inst.op() == SpillStack) {
          insertSpillStackAsserts(inst, unit);
          continue;
        }
        if (inst.op() == Call) {
          SSATmp* sp = inst.dst();
          IRInstruction* addr = unit.gen(LdStackAddr,
                                         inst.marker(),
                                         Type::PtrToGen,
                                         StackOffset(0),
                                         sp);
          insertAfter(&inst, addr);
          insertAfter(addr, unit.gen(DbgAssertPtr, inst.marker(), addr->dst()));
          continue;
        }
        if (!inst.isBlockEnd()) insertRefCountAsserts(inst, unit);
      }
    });
}
Example #8
void LinearScan::allocRegToInstruction(InstructionList::iterator it) {
  IRInstruction* inst = &*it;
  dumpIR<IRInstruction, kExtraLevel>(inst, "allocating to instruction");

  // Reload all source operands if necessary.
  // Mark registers as unpinned.
  for (int regNo = 0; regNo < kNumRegs; ++regNo) {
    m_regs[regNo].m_pinned = false;
  }
  smart::vector<bool> needsReloading(inst->numSrcs(), true);
  for (uint32_t i = 0; i < inst->numSrcs(); ++i) {
    SSATmp* tmp = inst->src(i);
    int32_t slotId = m_spillSlots[tmp];
    if (slotId == -1) {
      needsReloading[i] = false;
    } else if ((tmp = m_slots[slotId].latestReload)) {
      needsReloading[i] = false;
      inst->setSrc(i, tmp);
    }
    if (!needsReloading[i]) {
      for (int r = 0, n = m_allocInfo[tmp].numAllocatedRegs(); r < n; ++r) {
        m_regs[int(m_allocInfo[tmp].reg(r))].m_pinned = true;
      }
    }
  }
  for (uint32_t i = 0; i < inst->numSrcs(); ++i) {
    if (needsReloading[i]) {
      SSATmp* tmp = inst->src(i);
      int32_t slotId = m_spillSlots[tmp];
      // <tmp> is spilled, and not reloaded.
      // Therefore, we need to reload the value into a new SSATmp.

      // Insert the Reload instruction.
      SSATmp* spillTmp = m_slots[slotId].spillTmp;
      IRInstruction* reload = m_unit.gen(Reload, inst->marker(), spillTmp);
      inst->block()->insert(it, reload);

      // Create <reloadTmp> which inherits <tmp>'s slot ID and
      // <spillTmp>'s last use ID.
      // Replace <tmp> with <reloadTmp> in <inst>.
      SSATmp* reloadTmp = reload->dst();
      m_uses[reloadTmp].lastUse = m_uses[spillTmp].lastUse;
      m_spillSlots[reloadTmp] = slotId;
      inst->setSrc(i, reloadTmp);
      // reloadTmp and tmp share the same type.  Since it was spilled, it
      // must be using its entire needed-count of registers.
      assert(reloadTmp->type() == tmp->type());
      for (int locIndex = 0; locIndex < tmp->numNeededRegs();) {
        locIndex += allocRegToTmp(reloadTmp, locIndex);
      }
      // Remember this reload tmp in case we can reuse it in later blocks.
      m_slots[slotId].latestReload = reloadTmp;
      dumpIR<IRInstruction, kExtraLevel>(reload, "created reload");
    }
  }

  freeRegsAtId(m_linear[inst]);
  // Update next native.
  if (nextNative() == inst) {
    assert(!m_natives.empty());
    m_natives.pop_front();
    computePreColoringHint();
  }

  Range<SSATmp*> dsts = inst->dsts();
  if (dsts.empty()) return;

  Opcode opc = inst->op();
  if (opc == DefMIStateBase) {
    assert(dsts[0].isA(Type::PtrToCell));
    assignRegToTmp(&m_regs[int(rsp)], &dsts[0], 0);
    return;
  }

  for (SSATmp& dst : dsts) {
    for (int numAllocated = 0, n = dst.numNeededRegs(); numAllocated < n; ) {
      // LdRaw, loading a generator's embedded AR, is the only time we have a
      // pointer to an AR that is not in rVmFp.
      const bool abnormalFramePtr =
        (opc == LdRaw &&
          inst->src(1)->getValInt() == RawMemSlot::ContARPtr);

      // Note that the point of StashGeneratorSP is to save a StkPtr
      // somewhere other than rVmSp.  (TODO(#2288359): make rbx not
      // special.)
      const bool abnormalStkPtr = opc == StashGeneratorSP;

      if (!abnormalStkPtr && dst.isA(Type::StkPtr)) {
        assert(opc == DefSP ||
               opc == ReDefSP ||
               opc == ReDefGeneratorSP ||
               opc == PassSP ||
               opc == DefInlineSP ||
               opc == Call ||
               opc == CallArray ||
               opc == SpillStack ||
               opc == SpillFrame ||
               opc == CufIterSpillFrame ||
               opc == ExceptionBarrier ||
               opc == RetAdjustStack ||
               opc == InterpOne ||
               opc == InterpOneCF ||
               opc == GenericRetDecRefs ||
               opc == CheckStk ||
               opc == GuardStk ||
               opc == AssertStk ||
               opc == CastStk ||
               opc == CoerceStk ||
               opc == SideExitGuardStk  ||
               MInstrEffects::supported(opc));
        assignRegToTmp(&m_regs[int(rVmSp)], &dst, 0);
        numAllocated++;
        continue;
      }
      if (!abnormalFramePtr && dst.isA(Type::FramePtr)) {
        assignRegToTmp(&m_regs[int(rVmFp)], &dst, 0);
        numAllocated++;
        continue;
      }

      // Generally speaking, StkPtrs are pretty special due to
      // tracelet ABI registers. Keep track here of the allowed uses
      // that don't use the above allocation.
      assert(!dst.isA(Type::FramePtr) || abnormalFramePtr);
      assert(!dst.isA(Type::StkPtr) || abnormalStkPtr);

      if (!RuntimeOption::EvalHHIRDeadCodeElim || m_uses[dst].lastUse != 0) {
        numAllocated += allocRegToTmp(&dst, numAllocated);
      } else {
        numAllocated++;
      }
    }
  }
  if (!RuntimeOption::EvalHHIRDeadCodeElim) {
    // if any outputs were unused, free regs now.
    freeRegsAtId(m_linear[inst]);
  }
}
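
One detail worth isolating is the latestReload cache: the first use of a spilled value emits a Reload, and the slot remembers the reloaded temp so later uses reuse it instead of reloading again. A self-contained toy of just that bookkeeping (Slot and useSpilled are made-up names):

#include <cassert>
#include <optional>

// Toy model (hypothetical names, not HHVM's) of the spill-slot bookkeeping:
// each slot caches its most recent reload so repeated uses of a spilled value
// don't each emit a fresh Reload.
struct Slot {
  int spilledValue = 0;              // stand-in for the spill tmp
  std::optional<int> latestReload;   // stand-in for the reload tmp
};

int useSpilled(Slot& slot, int& reloadsEmitted) {
  if (!slot.latestReload) {          // first use since the spill: emit a Reload
    ++reloadsEmitted;
    slot.latestReload = slot.spilledValue;
  }
  return *slot.latestReload;         // later uses reuse the cached reload
}

int main() {
  Slot slot{42, std::nullopt};
  int reloads = 0;
  useSpilled(slot, reloads);
  useSpilled(slot, reloads);
  assert(reloads == 1);              // only one Reload for two uses
  return 0;
}
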
Example #9
MemEffects memory_effects_impl(const IRInstruction& inst) {
  switch (inst.op()) {

  //////////////////////////////////////////////////////////////////////
  // Region exits

  // These exits don't leave the current php function, and could head to code
  // that could read or write anything as far as we know (including frame
  // locals).
  case ReqBindJmp:
    return ExitEffects {
      AUnknown,
      stack_below(inst.src(0), inst.extra<ReqBindJmp>()->irSPOff.offset - 1)
    };
  case JmpSwitchDest:
    return ExitEffects {
      AUnknown,
      *stack_below(inst.src(1),
                   inst.extra<JmpSwitchDest>()->irSPOff.offset - 1).
        precise_union(AMIStateAny)
    };
  case JmpSSwitchDest:
    return ExitEffects {
      AUnknown,
      *stack_below(inst.src(1),
                   inst.extra<JmpSSwitchDest>()->offset.offset - 1).
        precise_union(AMIStateAny)
    };
  case ReqRetranslate:
  case ReqRetranslateOpt:
    return UnknownEffects {};

  //////////////////////////////////////////////////////////////////////
  // Unusual instructions

  /*
   * The ReturnHook sets up the ActRec so the unwinder knows everything is
   * already released (i.e. it calls ar->setLocalsDecRefd()).
   *
   * The eval stack is also dead at this point (the return value is passed to
   * ReturnHook as src(1), and the ReturnHook may not access the stack).
   */
  case ReturnHook:
    // Note, this instruction can re-enter, but doesn't need the may_reenter()
    // treatment because of the special kill semantics for locals and stack.
    return may_load_store_kill(
      AHeapAny, AHeapAny,
      *AStackAny.precise_union(AFrameAny)->precise_union(AMIStateAny)
    );

  // The suspend hooks can load anything (re-entering the VM), but can't write
  // to frame locals.
  case SuspendHookE:
  case SuspendHookR:
    // TODO: may-load here probably doesn't need to include AFrameAny normally.
    return may_reenter(inst,
                       may_load_store_kill(AUnknown, AHeapAny, AMIStateAny));

  /*
   * If we're returning from a function, it's ReturnEffects.  The RetCtrl
   * opcode also suspends resumables, which we model as having any possible
   * effects.
   *
   * Note that marking AFrameAny as dead isn't quite right, because that
   * ought to mean that the preceding StRetVal is dead; but memory effects
   * ignores StRetVal so the AFrameAny is fine.
   */
  case RetCtrl:
    if (inst.extra<RetCtrl>()->suspendingResumed) {
      // Suspending can go anywhere, and doesn't even kill locals.
      return UnknownEffects {};
    }
    return ReturnEffects {
      AStackAny | AFrameAny | AMIStateAny
    };

  case AsyncRetFast:
  case AsyncRetCtrl:
    if (inst.extra<RetCtrlData>()->suspendingResumed) {
      return UnknownEffects {};
    }
    return ReturnEffects {
      *stack_below(
        inst.src(0),
        inst.extra<RetCtrlData>()->spOffset.offset - 1
      ).precise_union(AMIStateAny)
    };

  case GenericRetDecRefs:
    /*
     * The may-store information here is AUnknown: even though we know it
     * doesn't really "store" to the frame locals, the values that used to be
     * there are no longer available because they are DecRef'd, which we are
     * required to report as may-store information to make it visible to
     * reference count optimizations.  It's conceptually the same as if it was
     * storing an Uninit over each of the locals, but the stores of uninits
     * would be dead so we're not actually doing that.
     */
    return may_reenter(inst,
                       may_load_store_kill(AUnknown, AUnknown, AMIStateAny));

  case EndCatch: {
    auto const stack_kills = stack_below(
      inst.src(1),
      inst.extra<EndCatch>()->offset.offset - 1
    );
    return ExitEffects {
      AUnknown,
      stack_kills | AMIStateTempBase | AMIStateBase
    };
  }

  /*
   * DefInlineFP has some special treatment here.
   *
   * It's logically `publishing' a pointer to a pre-live ActRec, making it
   * live.  It doesn't actually load from this ActRec, but after it's done this
   * the set of things that can load from it is large enough that the easiest
   * way to model this is to consider it as a load on behalf of `publishing'
   * the ActRec.  Once it's published, it's a live activation record, and
   * doesn't get written to as if it were a stack slot anymore (we've
   * effectively converted AStack locations into a frame until the
   * InlineReturn).
   *
   * TODO(#3634984): Additionally, DefInlineFP is marking may-load on all the
   * locals of the outer frame.  This is probably not necessary anymore, but we
   * added it originally because a store sinking prototype needed to know it
   * can't push StLocs past a DefInlineFP, because of reserved registers.
   * Right now it's just here because we need to think about and test it before
   * removing that set.
   */
  case DefInlineFP:
    return may_load_store_kill(
      AFrameAny | inline_fp_frame(&inst),
      /*
       * This prevents stack slots from the caller from being sunk into the
       * callee. Note that some of these stack slots overlap with the frame
       * locals of the callee; those slots are inaccessible in the inlined
       * call as frame and stack locations may not alias.
       */
      stack_below(inst.dst(), 0),
      /*
       * While not required for correctness, adding these slots to the kill set
       * will hopefully avoid some extra stores.
       */
      stack_below(inst.dst(), 0)
    );

  case InlineReturn:
    return ReturnEffects { stack_below(inst.src(0), 2) | AMIStateAny };

  case InlineReturnNoFrame:
    return ReturnEffects {
      AliasClass(AStack {
        inst.extra<InlineReturnNoFrame>()->frameOffset.offset,
        std::numeric_limits<int32_t>::max()
      }) | AMIStateAny
    };

  case InterpOne:
    return interp_one_effects(inst);
  case InterpOneCF:
    return ExitEffects {
      AUnknown,
      stack_below(inst.src(1), -inst.marker().spOff().offset - 1) | AMIStateAny
    };

  case NativeImpl:
    return UnknownEffects {};

  // NB: on the failure path, these C++ helpers do a fixup and read frame
  // locals before they throw.  They can also invoke the user error handler and
  // go do whatever they want to non-frame locations.
  //
  // TODO(#5372569): if we combine dv inits into the same regions we could
  // possibly avoid storing KindOfUninits if we modify this.
  case VerifyParamCallable:
  case VerifyParamCls:
  case VerifyParamFail:
    return may_raise(inst, may_load_store(AUnknown, AHeapAny));
  // However, the following ones can't read locals from our frame on the way
  // out, except as a side effect of raising a warning.
  case VerifyRetCallable:
  case VerifyRetCls:
    return may_raise(inst, may_load_store(AHeapAny, AHeapAny));
  // In PHP 7, VerifyRetFail can coerce the return type in weak files; even in
  // a strict file we may still coerce int to float. This is not true of HH
  // files.
  case VerifyRetFail: {
    auto func = inst.marker().func();
    auto mayCoerce =
      RuntimeOption::PHP7_ScalarTypes &&
      !RuntimeOption::EnableHipHopSyntax &&
      !func->unit()->isHHFile();
    auto stores = mayCoerce ? AHeapAny | AStackAny : AHeapAny;
    return may_raise(inst, may_load_store(AHeapAny | AStackAny, stores));
  }

  case CallArray:
    return CallEffects {
      inst.extra<CallArray>()->destroyLocals,
      AMIStateAny,
      // The AStackAny on this is more conservative than it could be; see Call
      // and CallBuiltin.
      AStackAny
    };
  case ContEnter:
    return CallEffects { false, AMIStateAny, AStackAny };

  case Call:
    {
      auto const extra = inst.extra<Call>();
      return CallEffects {
        extra->destroyLocals,
        // kill
        stack_below(inst.src(0), extra->spOffset.offset - 1) | AMIStateAny,
        // We might side-exit inside the callee, and interpret a return.  So we
        // can read anything anywhere on the eval stack above the call's entry
        // depth here.
        AStackAny
      };
    }

  case CallBuiltin:
    {
      auto const extra = inst.extra<CallBuiltin>();
      auto const stk = [&] () -> AliasClass {
        AliasClass ret = AEmpty;
        for (auto i = uint32_t{2}; i < inst.numSrcs(); ++i) {
          if (inst.src(i)->type() <= TPtrToGen) {
            auto const cls = pointee(inst.src(i));
            if (cls.maybe(AStackAny)) {
              ret = ret | cls;
            }
          }
        }
        return ret;
      }();
      auto const locs = extra->destroyLocals ? AFrameAny : AEmpty;
      return may_raise(
        inst, may_load_store_kill(stk | AHeapAny | locs, locs, AMIStateAny));
    }

  // Resumable suspension takes everything from the frame and moves it into the
  // heap.
  case CreateAFWH:
  case CreateAFWHNoVV:
  case CreateCont:
    return may_load_store_move(AFrameAny, AHeapAny, AFrameAny);

  // This re-enters to call extension-defined instance constructors.
  case ConstructInstance:
    return may_reenter(inst, may_load_store(AHeapAny, AHeapAny));

  case CheckStackOverflow:
  case CheckSurpriseFlagsEnter:
  case CheckSurpriseAndStack:
    return may_raise(inst, may_load_store(AEmpty, AEmpty));

  case InitExtraArgs:
    return UnknownEffects {};

  //////////////////////////////////////////////////////////////////////
  // Iterator instructions

  case IterInit:
  case MIterInit:
  case WIterInit:
    return iter_effects(
      inst,
      inst.src(1),
      AFrame { inst.src(1), inst.extra<IterData>()->valId }
    );
  case IterNext:
  case MIterNext:
  case WIterNext:
    return iter_effects(
      inst,
      inst.src(0),
      AFrame { inst.src(0), inst.extra<IterData>()->valId }
    );

  case IterInitK:
  case MIterInitK:
  case WIterInitK:
    {
      AliasClass key = AFrame { inst.src(1), inst.extra<IterData>()->keyId };
      AliasClass val = AFrame { inst.src(1), inst.extra<IterData>()->valId };
      return iter_effects(inst, inst.src(1), key | val);
    }

  case IterNextK:
  case MIterNextK:
  case WIterNextK:
    {
      AliasClass key = AFrame { inst.src(0), inst.extra<IterData>()->keyId };
      AliasClass val = AFrame { inst.src(0), inst.extra<IterData>()->valId };
      return iter_effects(inst, inst.src(0), key | val);
    }

  //////////////////////////////////////////////////////////////////////
  // Instructions that explicitly manipulate locals

  case StLoc:
    return PureStore {
      AFrame { inst.src(0), inst.extra<StLoc>()->locId },
      inst.src(1)
    };

  case StLocRange:
    {
      auto const extra = inst.extra<StLocRange>();
      auto acls = AEmpty;

      for (auto locId = extra->start; locId < extra->end; ++locId) {
        acls = acls | AFrame { inst.src(0), locId };
      }
      return PureStore { acls, inst.src(1) };
    }

  case LdLoc:
    return PureLoad { AFrame { inst.src(0), inst.extra<LocalId>()->locId } };

  case CheckLoc:
  case LdLocPseudoMain:
    // Note: LdLocPseudoMain is both a guard and a load, so it must not be a
    // PureLoad.
    return may_load_store(
      AFrame { inst.src(0), inst.extra<LocalId>()->locId },
      AEmpty
    );

  case StLocPseudoMain:
    // This can store to globals or locals, but we don't have globals supported
    // in AliasClass yet.
    return PureStore { AUnknown, inst.src(1) };

  case ClosureStaticLocInit:
    return may_load_store(AFrameAny, AFrameAny);

  //////////////////////////////////////////////////////////////////////
  // Pointer-based loads and stores

  case LdMem:
    return PureLoad { pointee(inst.src(0)) };
  case StMem:
    return PureStore { pointee(inst.src(0)), inst.src(1) };

  // TODO(#5962341): These take non-constant offset arguments, and are
  // currently only used for collections and class property inits, so we aren't
  // hooked up yet.
  case StElem:
    return PureStore {
      inst.src(0)->type() <= TPtrToRMembCell
        ? AHeapAny
        : AUnknown,
      inst.src(2)
    };
  case LdElem:
    return PureLoad {
      inst.src(0)->type() <= TPtrToRMembCell
        ? AHeapAny
        : AUnknown
    };

  case LdMBase:
    return PureLoad { AMIStateBase };

  case StMBase:
    return PureStore { AMIStateBase, inst.src(0) };

  case FinishMemberOp:
    return may_load_store_kill(AEmpty, AEmpty, AMIStateAny);

  case BoxPtr:
    {
      auto const mem = pointee(inst.src(0));
      return may_load_store(mem, mem);
    }
  case UnboxPtr:
    return may_load_store(pointee(inst.src(0)), AEmpty);

  case IsNTypeMem:
  case IsTypeMem:
  case CheckTypeMem:
  case CheckInitMem:
  case DbgAssertPtr:
    return may_load_store(pointee(inst.src(0)), AEmpty);

  //////////////////////////////////////////////////////////////////////
  // Object/Ref loads/stores

  case CheckRefInner:
    return may_load_store(ARef { inst.src(0) }, AEmpty);
  case LdRef:
    return PureLoad { ARef { inst.src(0) } };
  case StRef:
    return PureStore { ARef { inst.src(0) }, inst.src(1) };

  case InitObjProps:
    return may_load_store(AEmpty, APropAny);

  //////////////////////////////////////////////////////////////////////
  // Array loads and stores

  case InitPackedArray:
    return PureStore {
      AElemI { inst.src(0), inst.extra<InitPackedArray>()->index },
      inst.src(1)
    };

  case LdStructArrayElem:
    assertx(inst.src(1)->strVal()->isStatic());
    return PureLoad { AElemS { inst.src(0), inst.src(1)->strVal() } };

  case InitPackedArrayLoop:
    {
      auto const extra = inst.extra<InitPackedArrayLoop>();
      auto const stack_in = AStack {
        inst.src(1),
        extra->offset.offset + static_cast<int32_t>(extra->size) - 1,
        static_cast<int32_t>(extra->size)
      };
      return may_load_store_move(stack_in, AElemIAny, stack_in);
    }

  case NewStructArray:
    {
      // NewStructArray is reading elements from the stack, but writes to a
      // completely new array, so we can treat the store set as empty.
      auto const extra = inst.extra<NewStructArray>();
      auto const stack_in = AStack {
        inst.src(0),
        extra->offset.offset + static_cast<int32_t>(extra->numKeys) - 1,
        static_cast<int32_t>(extra->numKeys)
      };
      return may_load_store_move(stack_in, AEmpty, stack_in);
    }

  case ArrayIdx:
    return may_load_store(AElemAny | ARefAny, AEmpty);
  case MapIdx:
    return may_load_store(AHeapAny, AEmpty);
  case AKExistsArr:
    return may_load_store(AElemAny, AEmpty);
  case AKExistsObj:
    return may_reenter(inst, may_load_store(AHeapAny, AHeapAny));

  //////////////////////////////////////////////////////////////////////
  // Member instructions

  /*
   * Various minstr opcodes that take a PtrToGen in src 0, which may or may not
   * point to a frame local or the evaluation stack.  These instructions can
   * all re-enter the VM and access arbitrary heap locations, and some of them
   * take pointers to MinstrState locations, which they may both load and store
   * from if present.
   */
  case CGetElem:
  case EmptyElem:
  case IssetElem:
  case SetElem:
  case SetNewElemArray:
  case SetNewElem:
  case UnsetElem:
  case ElemArrayD:
  case ElemArrayU:
    // Right now we generally can't limit any of these better than general
    // re-entry rules, since they can raise warnings and re-enter.
    assertx(inst.src(0)->type() <= TPtrToGen);
    return may_raise(inst, may_load_store(
      AHeapAny | all_pointees(inst),
      AHeapAny | all_pointees(inst)
    ));

  case ElemX:
  case ElemDX:
  case ElemUX:
  case BindElem:
  case BindNewElem:
  case IncDecElem:
  case SetOpElem:
  case SetWithRefElem:
  case SetWithRefNewElem:
  case VGetElem:
    assertx(inst.src(0)->isA(TPtrToGen));
    return minstr_with_tvref(inst);

  /*
   * These minstr opcodes either take a PtrToGen or an Obj as the base.  The
   * pointer may point at frame locals or the stack.  These instructions can
   * all re-enter the VM and access arbitrary non-frame/stack locations, as
   * well.
   */
  case CGetProp:
  case CGetPropQ:
  case EmptyProp:
  case IssetProp:
  case UnsetProp:
  case IncDecProp:
  case SetProp:
    return may_raise(inst, may_load_store(
      AHeapAny | all_pointees(inst),
      AHeapAny | all_pointees(inst)
    ));

  case PropX:
  case PropDX:
  case PropQ:
  case BindProp:
  case SetOpProp:
  case VGetProp:
    return minstr_with_tvref(inst);

  /*
   * Collection accessors can read from their inner array buffer, but stores
   * COW and behave as if they only affect collection memory locations.  We
   * don't track those, so it's returning AEmpty for now.
   */
  case MapIsset:
  case PairIsset:
  case VectorDoCow:
  case VectorIsset:
    return may_load_store(AHeapAny, AEmpty /* Note */);
  case MapGet:
  case MapSet:
    return may_reenter(inst, may_load_store(AHeapAny, AEmpty /* Note */));


  //////////////////////////////////////////////////////////////////////
  // Instructions that allocate new objects, without reading any other memory
  // at all, so any effects they have on some types of memory locations we
  // track are isolated from anything else we care about.

  case NewArray:
  case NewCol:
  case NewInstanceRaw:
  case NewMixedArray:
  case AllocPackedArray:
  case ConvBoolToArr:
  case ConvDblToStr:
  case ConvDblToArr:
  case ConvIntToArr:
  case ConvIntToStr:
  case Box:  // conditional allocation
    return IrrelevantEffects {};

  case AllocObj:
    // AllocObj re-enters to call constructors, but if it weren't for that we
    // could ignore its loads and stores since it's a new object.
    return may_reenter(inst, may_load_store(AEmpty, AEmpty));

  //////////////////////////////////////////////////////////////////////
  // Instructions that explicitly manipulate the stack.

  case LdStk:
    return PureLoad {
      AStack { inst.src(0), inst.extra<LdStk>()->offset.offset, 1 }
    };

  case StStk:
    return PureStore {
      AStack { inst.src(0), inst.extra<StStk>()->offset.offset, 1 },
      inst.src(1)
    };

  case SpillFrame:
    {
      auto const spOffset = inst.extra<SpillFrame>()->spOffset;
      return PureSpillFrame {
        AStack {
          inst.src(0),
          // SpillFrame's spOffset is to the bottom of where it will store the
          // ActRec, but AliasClass needs an offset to the highest cell it will
          // store.
          spOffset.offset + int32_t{kNumActRecCells} - 1,
          int32_t{kNumActRecCells}
        },
        AStack {
          inst.src(0),
          // The context is in the highest slot.
          spOffset.offset + int32_t{kNumActRecCells} - 1,
          1
        }
      };
    }

  case CheckStk:
    return may_load_store(
      AStack { inst.src(0), inst.extra<CheckStk>()->irSpOffset.offset, 1 },
      AEmpty
    );
  case CufIterSpillFrame:
    return may_load_store(AEmpty, AStackAny);

  // The following may re-enter, and also deal with a stack slot.
  case CastStk:
    {
      auto const stk = AStack {
        inst.src(0), inst.extra<CastStk>()->offset.offset, 1
      };
      return may_raise(inst, may_load_store(stk, stk));
    }
  case CoerceStk:
    {
      auto const stk = AStack {
        inst.src(0),
        inst.extra<CoerceStk>()->offset.offset, 1
      };
      return may_raise(inst, may_load_store(stk, stk));
    }

  case CastMem:
  case CoerceMem:
    {
      auto aInst = inst.src(0)->inst();
      if (aInst->is(LdLocAddr)) {
        return may_raise(inst, may_load_store(AFrameAny, AFrameAny));
      }
      return may_raise(inst, may_load_store(AUnknown, AUnknown));
    }

  case LdARFuncPtr:
    // This instruction is essentially a PureLoad, but we don't handle non-TVs
    // in PureLoad so we have to treat it as may_load_store.  We also treat it
    // as loading an entire ActRec-sized part of the stack, although it only
    // loads the slot containing the Func.
    return may_load_store(
      AStack {
        inst.src(0),
        inst.extra<LdARFuncPtr>()->offset.offset + int32_t{kNumActRecCells} - 1,
        int32_t{kNumActRecCells}
      },
      AEmpty
    );

  //////////////////////////////////////////////////////////////////////
  // Instructions that never do anything to memory

  case AssertStk:
  case HintStkInner:
  case AbsDbl:
  case AddDbl:
  case AddInt:
  case AddIntO:
  case AndInt:
  case AssertLoc:
  case AssertType:
  case DefFP:
  case DefSP:
  case EndGuards:
  case EqBool:
  case EqCls:
  case EqDbl:
  case EqInt:
  case GteBool:
  case GteInt:
  case GtBool:
  case GtInt:
  case HintLocInner:
  case Jmp:
  case JmpNZero:
  case JmpZero:
  case LdPropAddr:
  case LdStkAddr:
  case LdPackedArrayElemAddr:
  case LteBool:
  case LteDbl:
  case LteInt:
  case LtBool:
  case LtInt:
  case GtDbl:
  case GteDbl:
  case LtDbl:
  case DivDbl:
  case DivInt:
  case MulDbl:
  case MulInt:
  case MulIntO:
  case NeqBool:
  case NeqDbl:
  case NeqInt:
  case SameObj:
  case NSameObj:
  case EqRes:
  case NeqRes:
  case CmpBool:
  case CmpInt:
  case CmpDbl:
  case SubDbl:
  case SubInt:
  case SubIntO:
  case XorBool:
  case XorInt:
  case OrInt:
  case AssertNonNull:
  case CheckNonNull:
  case CheckNullptr:
  case Ceil:
  case Floor:
  case DefLabel:
  case CheckInit:
  case Nop:
  case Mod:
  case Conjure:
  case Halt:
  case ConvBoolToInt:
  case ConvBoolToDbl:
  case DbgAssertType:
  case DbgAssertFunc:
  case DefConst:
  case LdLocAddr:
  case Sqrt:
  case LdResumableArObj:
  case Shl:
  case Shr:
  case IsNType:
  case IsType:
  case Mov:
  case ConvClsToCctx:
  case ConvDblToBool:
  case ConvDblToInt:
  case IsScalarType:
  case LdMIStateAddr:
  case LdPairBase:
  case LdStaticLocCached:
  case CheckCtxThis:
  case CastCtxThis:
  case LdARNumParams:
  case LdRDSAddr:
  case ExitPlaceholder:
  case CheckRange:
  case ProfileObjClass:
  case LdIfaceMethod:
  case InstanceOfIfaceVtable:
  case CheckARMagicFlag:
  case LdARNumArgsAndFlags:
  case StARNumArgsAndFlags:
  case LdTVAux:
  case StTVAux:
  case LdARInvName:
  case StARInvName:
  case MethodExists:
    return IrrelevantEffects {};

  //////////////////////////////////////////////////////////////////////
  // Instructions that technically do some things w/ memory, but not in any way
  // we currently care about.  They however don't return IrrelevantEffects
  // because we assume (in refcount-opts) that IrrelevantEffects instructions
  // can't even inspect Countable reference count fields, and several of these
  // can.  All GeneralEffects instructions are assumed to possibly do so.

  case DecRefNZ:
  case AFWHBlockOn:
  case IncRef:
  case IncRefCtx:
  case LdClosureCtx:
  case StClosureCtx:
  case StClosureArg:
  case StContArKey:
  case StContArValue:
  case StRetVal:
  case ConvStrToInt:
  case ConvResToInt:
  case OrdStr:
  case CreateSSWH:
  case NewLikeArray:
  case CheckRefs:
  case LdClsCctx:
  case BeginCatch:
  case CheckSurpriseFlags:
  case CheckType:
  case FreeActRec:
  case RegisterLiveObj:
  case StContArResume:
  case StContArState:
  case ZeroErrorLevel:
  case RestoreErrorLevel:
  case CheckCold:
  case CheckInitProps:
  case CheckInitSProps:
  case ContArIncIdx:
  case ContArIncKey:
  case ContArUpdateIdx:
  case ContValid:
  case ContStarted:
  case IncProfCounter:
  case IncStat:
  case IncStatGrouped:
  case CountBytecode:
  case ContPreNext:
  case ContStartedCheck:
  case ConvArrToBool:
  case ConvArrToDbl:
  case ConvArrToInt:
  case NewColFromArray:
  case ConvBoolToStr:
  case CountArray:
  case CountArrayFast:
  case StAsyncArResult:
  case StAsyncArResume:
  case StAsyncArSucceeded:
  case InstanceOf:
  case InstanceOfBitmask:
  case NInstanceOfBitmask:
  case InstanceOfIface:
  case InterfaceSupportsArr:
  case InterfaceSupportsDbl:
  case InterfaceSupportsInt:
  case InterfaceSupportsStr:
  case IsWaitHandle:
  case IsCol:
  case HasToString:
  case DbgAssertRefCount:
  case GtStr:
  case GteStr:
  case LtStr:
  case LteStr:
  case EqStr:
  case NeqStr:
  case SameStr:
  case NSameStr:
  case CmpStr:
  case GtStrInt:
  case GteStrInt:
  case LtStrInt:
  case LteStrInt:
  case EqStrInt:
  case NeqStrInt:
  case CmpStrInt:
  case SameArr:
  case NSameArr:
  case GtRes:
  case GteRes:
  case LtRes:
  case LteRes:
  case CmpRes:
  case IncTransCounter:
  case LdBindAddr:
  case LdAsyncArParentChain:
  case LdSSwitchDestFast:
  case RBTraceEntry:
  case RBTraceMsg:
  case ConvIntToBool:
  case ConvIntToDbl:
  case ConvStrToArr:   // decrefs src, but src is a string
  case ConvStrToBool:
  case ConvStrToDbl:
  case ConvResToDbl:
  case DerefClsRDSHandle:
  case EagerSyncVMRegs:
  case ExtendsClass:
  case LdUnwinderValue:
  case GetCtxFwdCall:
  case LdCtx:
  case LdCctx:
  case LdClosure:
  case LdClsName:
  case LdAFWHActRec:
  case LdClsCtx:
  case LdContActRec:
  case LdContArKey:
  case LdContArValue:
  case LdContField:
  case LdContResumeAddr:
  case LdClsCachedSafe:
  case LdClsInitData:
  case UnwindCheckSideExit:
  case LdCns:
  case LdClsMethod:
  case LdClsMethodCacheCls:
  case LdClsMethodCacheFunc:
  case LdClsMethodFCacheFunc:
  case ProfilePackedArray:
  case ProfileStructArray:
  case ProfileSwitchDest:
  case LdFuncCachedSafe:
  case LdFuncNumParams:
  case LdGblAddr:
  case LdGblAddrDef:
  case LdObjClass:
  case LdObjInvoke:
  case LdStrLen:
  case StringIsset:
  case LdSwitchDblIndex:
  case LdSwitchStrIndex:
  case LdVectorBase:
  case LdWHResult:
  case LdWHState:
  case LookupClsRDSHandle:
  case GetCtxFwdCallDyn:
  case DbgTraceCall:
  case InitCtx:
  case PackMagicArgs:
    return may_load_store(AEmpty, AEmpty);

  // Some that touch memory we might care about later, but currently don't:
  case CheckStaticLocInit:
  case StaticLocInitCached:
  case ColIsEmpty:
  case ColIsNEmpty:
  case ConvCellToBool:
  case ConvObjToBool:
  case CountCollection:
  case LdVectorSize:
  case VectorHasImmCopy:
  case CheckPackedArrayBounds:
  case LdColArray:
  case EnterFrame:
    return may_load_store(AEmpty, AEmpty);

  //////////////////////////////////////////////////////////////////////
  // Instructions that can re-enter the VM and touch most heap things.  They
  // also may generally write to the eval stack below an offset (see
  // alias-class.h above AStack for more).

  case DecRef:
    {
      auto const src = inst.src(0);
      // It could decref the inner ref.
      auto const maybeRef = src->isA(TBoxedCell) ? ARef { src } :
                            src->type().maybe(TBoxedCell) ? ARefAny : AEmpty;
      // Need to add maybeRef to the `store' set. See comments about
      // `GeneralEffects' in memory-effects.h.
      auto const effect = may_load_store(maybeRef, maybeRef);
      if (inst.src(0)->type().maybe(TArr | TObj | TBoxedArr | TBoxedObj)) {
        // Could re-enter to run a destructor.
        return may_reenter(inst, effect);
      }
      return effect;
    }

  case LdArrFPushCuf:  // autoloads
  case LdArrFuncCtx:   // autoloads
  case LdObjMethod:    // can't autoload, but can decref $this right now
  case LdStrFPushCuf:  // autoload
    /*
     * Note that these instructions make stores to a pre-live actrec on the
     * eval stack.
     *
     * It is probably safe for these instructions to have may-load only from
     * the portion of the evaluation stack below the actrec they are
     * manipulating, but since there's always going to be either a Call or a
     * region exit following it, it doesn't help us eliminate anything for now,
     * so we just pretend it can read/write anything on the stack.
     */
    return may_raise(inst, may_load_store(AStackAny, AStackAny));

  case LookupClsMethod:   // autoload, and it writes part of the new actrec
    {
      AliasClass effects = AStack {
        inst.src(2),
        inst.extra<LookupClsMethod>()->offset.offset,
        int32_t{kNumActRecCells}
      };
      return may_raise(inst, may_load_store(effects, effects));
    }

  case LdClsPropAddrOrNull:   // may run 86{s,p}init, which can autoload
  case LdClsPropAddrOrRaise:  // raises errors, and 86{s,p}init
  case BaseG:
  case Clone:
  case RaiseArrayIndexNotice:
  case RaiseArrayKeyNotice:
  case RaiseUninitLoc:
  case RaiseUndefProp:
  case RaiseMissingArg:
  case RaiseError:
  case RaiseNotice:
  case RaiseWarning:
  case ConvCellToStr:
  case ConvObjToStr:
  case Count:      // re-enters on CountableClass
  case CIterFree:  // decrefs context object in iter
  case MIterFree:
  case IterFree:
  case GtObj:
  case GteObj:
  case LtObj:
  case LteObj:
  case EqObj:
  case NeqObj:
  case CmpObj:
  case GtArr:
  case GteArr:
  case LtArr:
  case LteArr:
  case EqArr:
  case NeqArr:
  case CmpArr:
  case DecodeCufIter:
  case ConvCellToArr:  // decrefs src, may read obj props
  case ConvCellToObj:  // decrefs src
  case ConvObjToArr:   // decrefs src
  case GenericIdx:
  case InitProps:
  case InitSProps:
  case OODeclExists:
  case LdCls:          // autoload
  case LdClsCached:    // autoload
  case LdFunc:         // autoload
  case LdFuncCached:   // autoload
  case LdFuncCachedU:  // autoload
  case LdSwitchObjIndex:  // decrefs arg
  case LookupClsCns:      // autoload
  case LookupClsMethodCache:  // autoload
  case LookupClsMethodFCache: // autoload
  case LookupCns:
  case LookupCnsE:
  case LookupCnsU:
  case StringGet:      // raise_warning
  case ArrayAdd:       // decrefs source
  case AddElemIntKey:  // decrefs value
  case AddElemStrKey:  // decrefs value
  case AddNewElem:     // decrefs value
  case ArrayGet:       // kVPackedKind warnings
  case ArrayIsset:     // kVPackedKind warnings
  case ArraySet:       // kVPackedKind warnings
  case ArraySetRef:    // kVPackedKind warnings
  case ElemArray:
  case ElemArrayW:
  case GetMemoKey:     // re-enters to call getInstanceKey() in some cases
  case LdClsCtor:
  case ConcatStrStr:
  case PrintStr:
  case PrintBool:
  case PrintInt:
  case ConcatIntStr:
  case ConcatStrInt:
  case LdSSwitchDestSlow:
  case ConvObjToDbl:
  case ConvObjToInt:
  case MapAddElemC:
  case ColAddNewElemC:
  case CoerceStrToInt:
  case CoerceStrToDbl:
  case CoerceCellToDbl:
  case CoerceCellToInt:
  case CoerceCellToBool:
  case ConvCellToInt:
  case ConvResToStr:
  case ConcatStr3:
  case ConcatStr4:
  case ConvCellToDbl:
  case ThrowOutOfBounds:
  case ThrowInvalidOperation:
  case ThrowArithmeticError:
  case ThrowDivisionByZeroError:
    return may_raise(inst, may_load_store(AHeapAny, AHeapAny));

  case ReleaseVVAndSkip:  // can decref ExtraArgs or VarEnv and Locals
    return may_reenter(inst,
                       may_load_store(AHeapAny|AFrameAny, AHeapAny|AFrameAny));

  // These two instructions don't touch memory we track, except that they may
  // re-enter to construct php Exception objects.  During this re-entry anything
  // can happen (e.g. a surprise flag check could cause a php signal handler to
  // run arbitrary code).
  case ABCUnblock:
  case AFWHPrepareChild:
    return may_reenter(inst, may_load_store(AEmpty, AEmpty));

  //////////////////////////////////////////////////////////////////////
  // The following instructions are used for debugging memory optimizations.
  // We can't ignore them, because they can prevent future optimizations;
  // e.g. t1 = LdStk<N>; DbgTrashStk<N>; StStk<N> t1
  // If we ignore the DbgTrashStk, it looks like the StStk is redundant.

  case DbgTrashStk:
    return GeneralEffects {
      AEmpty, AEmpty, AEmpty,
      AStack { inst.src(0), inst.extra<DbgTrashStk>()->offset.offset, 1 }
    };
  case DbgTrashFrame:
    return GeneralEffects {
      AEmpty, AEmpty, AEmpty,
      AStack {
        inst.src(0),
        // SpillFrame's spOffset is to the bottom of where it will store the
        // ActRec, but AliasClass needs an offset to the highest cell it will
        // store.
        inst.extra<DbgTrashFrame>()->offset.offset +
          int32_t{kNumActRecCells} - 1,
        int32_t{kNumActRecCells}
      }
    };
  case DbgTrashMem:
    return GeneralEffects {
      AEmpty, AEmpty, AEmpty,
      pointee(inst.src(0))
    };

  //////////////////////////////////////////////////////////////////////

  }

  not_reached();
}
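
For context on how a result like this is consumed, here is a self-contained sketch of the dispatch pattern a client pass might use, built on std::variant with stand-in effect types rather than HHVM's actual MemEffects and match helper: the pass classifies each instruction's effects and reacts per case, e.g. only a PureStore is a dead-store candidate.

#include <cstdio>
#include <type_traits>
#include <variant>

// Stand-in effect types; HHVM's real MemEffects has more cases.
struct AliasClassStub { const char* name; };
struct PureLoad       { AliasClassStub src; };
struct PureStore      { AliasClassStub dst; };
struct UnknownEffects {};
using MemEffectsSketch = std::variant<PureLoad, PureStore, UnknownEffects>;

// Dispatch on the effect kind, the way a store- or load-elimination pass
// would decide what it is allowed to do with an instruction.
void inspect(const MemEffectsSketch& effects) {
  std::visit([](const auto& e) {
    using T = std::decay_t<decltype(e)>;
    if constexpr (std::is_same_v<T, PureLoad>) {
      std::printf("pure load from %s\n", e.src.name);
    } else if constexpr (std::is_same_v<T, PureStore>) {
      std::printf("pure store to %s (dead-store candidate)\n", e.dst.name);
    } else {
      std::printf("unknown effects: give up on this instruction\n");
    }
  }, effects);
}

int main() {
  inspect(MemEffectsSketch{PureStore{{"AFrame{fp,0}"}}});
  inspect(MemEffectsSketch{UnknownEffects{}});
  return 0;
}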