Example #1
void prepareForNextHHBC(IRGS& env,
                        const NormalizedInstruction* ni,
                        SrcKey newSk,
                        bool lastBcInst) {
  FTRACE(1, "------------------- prepareForNextHHBC ------------------\n");
  env.currentNormalizedInstruction = ni;

  always_assert_flog(
    IMPLIES(isInlining(env), !env.lastBcInst),
    "Tried to end trace while inlining."
  );

  always_assert_flog(
    IMPLIES(isInlining(env), !env.firstBcInst),
    "Inlining while still at the first region instruction."
  );

  always_assert(env.bcStateStack.size() >= env.inlineLevel + 1);
  auto pops = env.bcStateStack.size() - 1 - env.inlineLevel;
  while (pops--) env.bcStateStack.pop_back();

  always_assert_flog(env.bcStateStack.back().func() == newSk.func(),
                     "Tried to update current SrcKey with a different func");

  env.bcStateStack.back().setOffset(newSk.offset());
  updateMarker(env);
  env.lastBcInst = lastBcInst;
  env.catchCreator = nullptr;
  env.irb->prepareForNextHHBC();
}
Example #2
Block* findMainExitBlock(const IRUnit& unit, SrcKey lastSk) {
  Block* mainExit = nullptr;

  FTRACE(5, "findMainExitBlock: starting on unit:\n{}\n", show(unit));

  for (auto block : rpoSortCfg(unit)) {
    if (endsUnitAtSrcKey(block, lastSk)) {
      if (mainExit == nullptr) {
        mainExit = block;
        continue;
      }

      always_assert_flog(
        mainExit->hint() == Block::Hint::Unlikely ||
        block->hint() == Block::Hint::Unlikely,
        "findMainExit: 2 likely exits found: B{} and B{}\nlastSk = {}",
        mainExit->id(), block->id(), showShort(lastSk));

      if (mainExit->hint() == Block::Hint::Unlikely) mainExit = block;
    }
  }

  always_assert_flog(mainExit, "findMainExit: no exit found for lastSk = {}",
                     showShort(lastSk));

  FTRACE(5, "findMainExitBlock: mainExit = B{}\n", mainExit->id());

  return mainExit;
}
Example #3
bool check_invariants(const FrameState& state) {
  for (auto id = uint32_t{0}; id < state.locals.size(); ++id) {
    auto const& local = state.locals[id];

    always_assert_flog(
      local.predictedType <= local.type,
      "local {} failed prediction invariants; pred = {}, type = {}\n",
      id,
      local.predictedType,
      local.type
    );

    always_assert_flog(
      local.value == nullptr || local.value->type() == local.type,
      "local {} had type {}, but value {}\n",
      id,
      local.type,
      local.value->toString()
    );

    if (state.curFunc->isPseudoMain()) {
      always_assert_flog(
        local.value == nullptr,
        "We should never be tracking values for locals in a pseudomain "
          "right now.  Local {} had value {}",
        id,
        local.value->toString()
      );
      always_assert_flog(
        local.type == TGen,
        "We should never be tracking non-predicted types for locals in "
          "a pseudomain right now.  Local {} had type {}",
        id,
        local.type.toString()
      );
    }
  }

  // We require that the memory stack always be at least as big as the
  // irSPOff, unless irSPOff went negative (because we're returning and have
  // freed the ActRec).  Note that there are some "wasted" slots where
  // locals/iterators would be in the vector right now.
  always_assert_flog(
    state.irSPOff < FPInvOffset{0} ||
    state.stack.size() >= state.irSPOff.offset,
    "stack was smaller than possible"
  );

  return true;
}
Example #4
Type relaxType(Type t, DataTypeCategory cat) {
  always_assert_flog(t <= TGen && t != TBottom, "t = {}", t);
  if (cat == DataTypeGeneric) return TGen;
  auto const relaxed =
    (t & TCell) <= TBottom ? TBottom : relaxCell(t & TCell, cat);
  return t <= TCell ? relaxed : relaxed | TBoxedInitCell;
}
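
A couple of quick traces through relaxType may help; they follow only from the subtype facts visible in the code (TInt <= TCell, and TBoxedInitCell sharing nothing with TCell), and are a sketch rather than quoted behavior:

// relaxType(TInt, DataTypeGeneric)
//   -> early return TGen
// relaxType(TBoxedInitCell, cat)   // any non-generic cat
//   -> (TBoxedInitCell & TCell) <= TBottom, so relaxed = TBottom
//   -> TBoxedInitCell is not <= TCell, so the result is
//      TBottom | TBoxedInitCell == TBoxedInitCell
// i.e. boxed-ness always survives relaxation; only the cell part widens.
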
Example #5
/*
 * Look up a catch trace for the given TCA, returning nullptr if none was
 * found. Aborts if a nullptr catch trace was registered, which means the
 * call site isn't allowed to throw.
 */
TCA lookup_catch_trace(TCA rip, _Unwind_Exception* exn) {
  if (auto catchTraceOpt = mcg->getCatchTrace(rip)) {
    if (auto catchTrace = *catchTraceOpt) return catchTrace;

    // A few of our optimization passes must be aware of every path out of
    // the trace, so throwing through jitted code without a catch block is
    // very bad. This is indicated with a present but nullptr entry in the
    // catch trace map.
    const size_t kCallSize = 5;
    const uint8_t kCallOpcode = 0xe8;

    auto callAddr = rip - kCallSize;
    TCA helperAddr = nullptr;
    if (*callAddr == kCallOpcode) {
      helperAddr = rip + *reinterpret_cast<int32_t*>(callAddr + 1);
    }

    always_assert_flog(false,
                       "Translated call to {} threw '{}' without "
                       "catch block, return address: {}\n",
                       getNativeFunctionName(helperAddr),
                       typeInfoFromUnwindException(exn).name(),
                       rip);
  }

  return nullptr;
}
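
The helper-address recovery above leans on x86-64 call encoding: a direct near call is the opcode byte 0xe8 followed by a 32-bit displacement relative to the next instruction, which is exactly the return address. A standalone sketch of the same decoding, assuming TCA is a byte pointer as elsewhere in this code:

// Recover the target of a 5-byte direct call given its return address.
TCA decode_call_target(TCA retAddr) {
  auto const insn = retAddr - 5;                          // e8 xx xx xx xx
  if (*insn != 0xe8) return nullptr;                      // not a direct call
  auto const disp = *reinterpret_cast<int32_t*>(insn + 1);
  return retAddr + disp;                                  // rel32 is IP-relative
}
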
Example #6
MemEffects memory_effects(const IRInstruction& inst) {
  auto const ret = memory_effects_impl(inst);
  if (debug) {
    // In debug builds, do some type checking in case people renumber
    // instruction arguments.
    auto const fp = match<SSATmp*>(
      ret,
      [&] (UnknownEffects)    { return nullptr; },
      [&] (IrrelevantEffects) { return nullptr; },
      [&] (ReadAllLocals)     { return nullptr; },
      [&] (KillFrameLocals l) { return l.fp; },
      [&] (ReadLocal l)       { return l.fp; },
      [&] (ReadLocal2 l)      { return l.fp; },
      [&] (StoreLocal l)      { return l.fp; },
      [&] (StoreLocalNT l)    { return l.fp; }
    );
    if (fp != nullptr) {
      always_assert_flog(
        fp->type() <= Type::FramePtr,
        "Non frame pointer in memory effects:\n  inst: {}\n  effects: {}",
        inst.toString(),
        show(ret)
      );
    }
  }
  return ret;
}
Example #7
/*
 * MOpFlags is a bitmask so it doesn't fit into the [0,n) pattern of the other
 * subops above.
 */
const char* subopToName(MOpFlags f) {
  switch (f) {
#define FLAG(name, val) case MOpFlags::name: return #name;
  M_OP_FLAGS
#undef FLAG
  }
  always_assert_flog(false, "Invalid MOpFlags: {}", uint8_t(f));
}
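
subopToName relies on the FLAG/M_OP_FLAGS X-macro pair to keep the enum and its names in sync. A self-contained sketch of that pattern (the flag names and values here are illustrative, not HHVM's actual list):

#include <cstdint>

#define M_OP_FLAGS  \
  FLAG(None,   0)   \
  FLAG(Warn,   1)   \
  FLAG(Define, 2)   \
  FLAG(Unset,  4)

enum class MOpFlags : uint8_t {
#define FLAG(name, val) name = val,
  M_OP_FLAGS
#undef FLAG
};

Expanding the same list inside the switch yields one "case MOpFlags::name: return #name;" per flag, so adding a flag to M_OP_FLAGS updates both the enum and subopToName at once.
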
Example #8
void safe_cast_failure(const std::string& valStr,
                       const char* fn,
                       const char* what) {
  always_assert_flog(
    false,
    "conversion of {} failed in {} : {}\n",
    valStr, fn, what
  );
}
Example #9
Vptr lookupDestructor(Vout& v, Vreg type) {
  auto const table = reinterpret_cast<intptr_t>(g_destructors);
  always_assert_flog(deltaFits(table, sz::dword),
    "Destructor function table is expected to be in the data "
    "segment, with addresses less than 2^31"
  );
  auto index = v.makeReg();
  v << shrli{kShiftDataTypeToDestrIndex, type, index, v.makeReg()};
  return baseless(index * 8 + safe_cast<int>(table));
}
Example #10
TCA emitFuncPrologue(Func* func, int argc, TransKind kind) {
  try {
    return emitFuncPrologueImpl(func, argc, kind);
  } catch (const DataBlockFull& dbFull) {
    // Fail hard if the block isn't code.hot.
    always_assert_flog(dbFull.name == "hot",
                       "data block = {}\nmessage: {}\n",
                       dbFull.name, dbFull.what());

    // Otherwise, fall back to code.main and retry.
    code().disableHot();
    try {
      return emitFuncPrologueImpl(func, argc, kind);
    } catch (const DataBlockFull& dbFull) {
      always_assert_flog(0, "data block = {}\nmessage: {}\n",
                         dbFull.name, dbFull.what());
    }
  }
}
Example #11
void FrameState::refineLocalType(uint32_t id, Type type, SSATmp* typeSource) {
  always_assert(id < m_locals.size());
  auto& local = m_locals[id];
  Type newType = refineType(local.type, type);
  ITRACE(2, "updating local {}'s type: {} -> {}\n",
         id, local.type, newType);
  always_assert_flog(newType != Type::Bottom,
                     "Bad new type for local {}: {} & {} = {}",
                     id, local.type, type, newType);
  local.type = newType;
  local.typeSource = typeSource;
}
Example #12
bool install_catch_trace(_Unwind_Context* ctx, _Unwind_Exception* exn,
                         bool do_side_exit, TypedValue unwinder_tv) {
  auto const rip = (TCA)_Unwind_GetIP(ctx);
  auto catchTraceOpt = mcg->getCatchTrace(rip);
  if (!catchTraceOpt) {
    FTRACE(1, "No catch trace entry for ip {}; bailing\n", rip);
    return false;
  }

  auto catchTrace = *catchTraceOpt;
  if (!catchTrace) {
    // A few of our optimization passes must be aware of every path out of the
    // trace, so throwing through jitted code without a catch block is very
    // bad. This is indicated with a present but nullptr entry in the catch
    // trace map.
    const size_t kCallSize = 5;
    const uint8_t kCallOpcode = 0xe8;

    auto callAddr = rip - kCallSize;
    TCA helperAddr = nullptr;
    if (*callAddr == kCallOpcode) {
      helperAddr = rip + *reinterpret_cast<int32_t*>(callAddr + 1);
    }

    always_assert_flog(false,
                       "Translated call to {} threw '{}' without "
                       "catch block, return address: {}\n",
                       getNativeFunctionName(helperAddr),
                       exceptionFromUnwindException(exn)->what(),
                       rip);
    return false;
  }

  FTRACE(1, "installing catch trace {} for call {} with tv {}, "
         "returning _URC_INSTALL_CONTEXT\n",
         catchTrace, rip, unwinder_tv.pretty());

  // In theory the unwind api will let us set registers in the frame
  // before executing our landing pad. In practice, trying to use
  // their recommended scratch registers results in a SEGV inside
  // _Unwind_SetGR, so we pass things to the handler using the
  // RDS. This also simplifies the handler code because it doesn't
  // have to worry about saving its arguments somewhere while
  // executing the exit trace.
  unwindRdsInfo->unwinderScratch = (int64_t)exn;
  unwindRdsInfo->doSideExit = do_side_exit;
  if (do_side_exit) {
    unwindRdsInfo->unwinderTv = unwinder_tv;
  }
  _Unwind_SetIP(ctx, (uint64_t)catchTrace);
  tl_regState = VMRegState::DIRTY;

  return true;
}
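
For context, install_catch_trace is meant to be driven from an Itanium-ABI personality routine during the cleanup phase. A heavily simplified sketch follows; the real HHVM personality routine does considerably more, and the false/TypedValue{} arguments here are placeholder assumptions:

_Unwind_Reason_Code example_personality(int version, _Unwind_Action actions,
                                        uint64_t exn_class,
                                        _Unwind_Exception* exn,
                                        _Unwind_Context* ctx) {
  if (actions & _UA_CLEANUP_PHASE) {
    // If a catch trace is registered for the faulting IP, resume there
    // instead of continuing the normal unwind.
    if (install_catch_trace(ctx, exn, false, TypedValue{})) {
      return _URC_INSTALL_CONTEXT;
    }
  }
  return _URC_CONTINUE_UNWIND;
}
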
Example #13
pid_t LightProcess::proc_open(const char *cmd, const std::vector<int> &created,
                              const std::vector<int> &desired,
                              const char *cwd,
                              const std::vector<std::string> &env) {
  int id = GetId();
  Lock lock(g_procs[id].m_procMutex);
  always_assert(Available());
  always_assert(created.size() == desired.size());

  auto fout = g_procs[id].m_afdt_fd;
  lwp_write(fout, "proc_open");
  lwp_write(fout, cmd);
  lwp_write(fout, cwd ? cwd : "");
  lwp_write_int32(fout, (int)env.size());
  for (unsigned int i = 0; i < env.size(); i++) {
    lwp_write(fout, env[i]);
  }

  lwp_write_int32(fout, (int)created.size());
  for (unsigned int i = 0; i < desired.size(); i++) {
    lwp_write_int32(fout, desired[i]);
  }

  bool error_send = false;
  int save_errno = 0;
  for (unsigned int i = 0; i < created.size(); i++) {
    if (!send_fd(g_procs[id].m_afdt_fd, created[i])) {
      error_send = true;
      save_errno = errno;
      break;
    }
  }

  std::string buf;
  auto fin = g_procs[id].m_afdt_fd;
  lwp_read(fin, buf);
  if (buf == "error") {
    lwp_read_int32(fin, errno);
    if (error_send) {
      // On this error, the receiver side returns a dummy errno, so use the
      // sender-side errno here.
      errno = save_errno;
    }
    return -1;
  }
  always_assert_flog(buf == "success",
                     "Unexpected message from light process: `{}'", buf);
  int64_t pid = -1;
  lwp_read_int64(fin, pid);
  always_assert(pid);
  return (pid_t)pid;
}
Example #14
GuardConstraint relaxConstraint(GuardConstraint origGc,
                                Type knownType, Type toRelax) {
  ITRACE(4, "relaxConstraint({}, knownType = {}, toRelax = {})\n",
         origGc, knownType, toRelax);
  Trace::Indent _i;

  // AssertType can be given TCtx, which should never relax.
  if (toRelax.maybe(TCctx)) {
    always_assert(toRelax <= TCtx);
    return origGc;
  }

  auto const dstType = knownType & toRelax;
  always_assert_flog(typeFitsConstraint(dstType, origGc),
                     "refine({}, {}) doesn't fit {}",
                     knownType, toRelax, origGc);

  // Preserve origGc's weak property.
  GuardConstraint newGc{DataTypeGeneric};
  newGc.weak = origGc.weak;

  while (true) {
    if (newGc.isSpecialized()) {
      // We need to ask for the right kind of specialization, so grab it from
      // origGc.
      if (origGc.wantArrayKind()) newGc.setWantArrayKind();
      if (origGc.wantClass()) newGc.setDesiredClass(origGc.desiredClass());
    }

    auto const relaxed = relaxType(toRelax, newGc.category);
    auto const newDstType = relaxed & knownType;
    if (typeFitsConstraint(newDstType, origGc)) break;

    ITRACE(5, "newDstType = {}, newGc = {}; incrementing constraint\n",
      newDstType, newGc);
    incCategory(newGc.category);
  }
  // DataTypeCountness can be relaxed to DataTypeGeneric in
  // optimizeProfiledGuards, so we can't rely on this category to give type
  // information through guards.  Since relaxConstraint is used to relax the
  // DataTypeCategory for guards, we cannot return DataTypeCountness unless we
  // already had it to start with.  Instead, we return DataTypeBoxAndCountness,
  // which won't be further relaxed by optimizeProfiledGuards.
  if (newGc.category == DataTypeCountness && origGc != DataTypeCountness) {
    newGc.category = DataTypeBoxAndCountness;
  }
  ITRACE(4, "Returning {}\n", newGc);
  // newGc shouldn't be any more specific than origGc.
  always_assert(newGc.category <= origGc.category);
  return newGc;
}
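
A worked pass through the loop above, mirroring the doc-comment example on the TypeConstraint variant of this function (Example #21 below); treat it as a sketch of the intended behavior:

// relaxConstraint(DataTypeSpecialized,
//                 knownType = Obj<C>|InitNull, toRelax = Obj)
//   iter 1: category = DataTypeGeneric
//     relaxType(Obj, DataTypeGeneric) = Gen
//     Gen & (Obj<C>|InitNull) = Obj<C>|InitNull   // doesn't satisfy
//                                                 // DataTypeSpecialized
//   iter 2: category = DataTypeCountness
//     relaxType(Obj, DataTypeCountness) = Obj
//     Obj & (Obj<C>|InitNull) = Obj<C>            // fits; loop exits
//   post-loop: Countness is bumped to DataTypeBoxAndCountness, as the
//   comment above explains, and that constraint is returned.
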
Example #15
/*
 * Build the CFG, then the dominator tree, then use it to validate SSA.
 * 1. Each src must be defined by some other instruction, and each dst must
 *    be defined by the current instruction.
 * 2. Each src must be defined earlier in the same block or in a dominator.
 * 3. Each dst must not be previously defined.
 * 4. Treat tmps defined by DefConst as always defined.
 * 5. Each predecessor of a reachable block must be reachable (deleted
 *    blocks must not have out-edges to reachable blocks).
 * 6. The entry block must not have any predecessors.
 * 7. The entry block starts with a DefFP instruction.
 */
bool checkCfg(const IRUnit& unit) {
  auto const blocksIds = rpoSortCfgWithIds(unit);
  auto const& blocks = blocksIds.blocks;
  jit::hash_set<const Edge*> edges;

  // Entry block can't have predecessors.
  always_assert(unit.entry()->numPreds() == 0);

  // Entry block starts with DefFP
  always_assert(!unit.entry()->empty() &&
                unit.entry()->begin()->op() == DefFP);

  // Check valid successor/predecessor edges.
  for (Block* b : blocks) {
    auto checkEdge = [&] (const Edge* e) {
      always_assert(e->from() == b);
      edges.insert(e);
      for (auto& p : e->to()->preds()) if (&p == e) return;
      always_assert(false); // did not find edge.
    };
    checkBlock(b);
    if (auto *e = b->nextEdge())  checkEdge(e);
    if (auto *e = b->takenEdge()) checkEdge(e);
  }
  for (Block* b : blocks) {
    for (auto const &e : b->preds()) {
      always_assert(&e == e.inst()->takenEdge() || &e == e.inst()->nextEdge());
      always_assert(e.to() == b);
    }
  }

  // Visit every instruction and make sure their sources are defined in a block
  // that dominates the block containing the instruction.
  auto const idoms = findDominators(unit, blocksIds);
  forEachInst(blocks, [&] (const IRInstruction* inst) {
    for (auto src : inst->srcs()) {
      if (src->inst()->is(DefConst)) continue;
      auto const dom = findDefiningBlock(src);
      always_assert_flog(
        dom && dominates(dom, inst->block(), idoms),
        "src '{}' in '{}' came from '{}', which is not a "
        "DefConst and is not defined at this use site",
        src->toString(), inst->toString(),
        src->inst()->toString()
      );
    }
  });

  return true;
}
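
checkCfg returns bool rather than void purely so it can sit inside an assertion and be compiled out of release builds. A typical call site might look like this (hypothetical sketch, not quoted from the source):

// Verify the unit's CFG and SSA invariants only when assertions are enabled.
assertx(checkCfg(unit));
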
Example #16
AStack::AStack(SSATmp* base, int32_t o, int32_t s)
  : offset(o), size(s)
{
  // Always canonicalize to the outermost frame pointer.
  if (base->isA(TStkPtr)) {
    auto const defSP = base->inst();
    always_assert_flog(defSP->is(DefSP),
                       "unexpected StkPtr: {}\n", base->toString());
    offset -= defSP->extra<DefSP>()->offset.offset;
    return;
  }

  assertx(base->isA(TFramePtr));
  auto const defInlineFP = base->inst();
  if (defInlineFP->is(DefInlineFP)) {
    auto const sp = defInlineFP->src(0)->inst();
    offset += defInlineFP->extra<DefInlineFP>()->spOffset.offset;
    offset -= sp->extra<DefSP>()->offset.offset;
    always_assert_flog(sp->src(0)->inst()->is(DefFP),
                       "failed to canonicalize to outermost FramePtr: {}\n",
                       sp->src(0)->toString());
  }
}
Example #17
Block* findMainExitBlock(const IRUnit& unit, SrcKey lastSk) {
  bool unreachable = false;
  Block* mainExit = nullptr;

  FTRACE(5, "findMainExitBlock: looking for exit at {} in unit:\n{}\n",
         showShort(lastSk), show(unit));

  for (auto block : rpoSortCfg(unit)) {
    if (block->back().is(Unreachable)) unreachable = true;

    if (endsUnitAtSrcKey(block, lastSk)) {
      if (mainExit == nullptr) {
        mainExit = block;
        continue;
      }

      always_assert_flog(
        mainExit->hint() == Block::Hint::Unlikely ||
        block->hint() == Block::Hint::Unlikely,
        "findMainExit: 2 likely exits found: B{} and B{}\nlastSk = {}",
        mainExit->id(), block->id(), showShort(lastSk)
      );

      if (mainExit->hint() == Block::Hint::Unlikely) mainExit = block;
    }
  }

  always_assert_flog(
    mainExit || unreachable,
    "findMainExit: no exit found for lastSk = {}",
    showShort(lastSk)
  );

  FTRACE(5, "findMainExitBlock: mainExit = B{}\n", mainExit->id());

  return mainExit;
}
Example #18
TCA emit_bindjcc1st_stub(CodeBlock& cb, FPInvOffset spOff, TCA jcc,
                         SrcKey taken, SrcKey next, ConditionCode cc) {
  always_assert_flog(taken.resumed() == next.resumed(),
                     "bind_jcc_1st was confused about resumables");
  return emit_ephemeral(
    cb,
    mcg->getFreeStub(cb, &mcg->cgFixups()),
    taken.resumed() ? folly::none : folly::make_optional(spOff),
    REQ_BIND_JCC_FIRST,
    jcc,
    taken.toAtomicInt(),
    next.toAtomicInt(),
    cc
  );
}
Example #19
Vptr lookupDestructor(Vout& v, Vreg type) {
  auto const table = reinterpret_cast<intptr_t>(g_destructors);
  always_assert_flog(deltaFits(table, sz::dword),
    "Destructor function table is expected to be in the data "
    "segment, with addresses less than 2^31"
  );
  static_assert((KindOfString   >> kShiftDataTypeToDestrIndex == 1) &&
                (KindOfArray    >> kShiftDataTypeToDestrIndex == 2) &&
                (KindOfObject   >> kShiftDataTypeToDestrIndex == 3) &&
                (KindOfResource >> kShiftDataTypeToDestrIndex == 4) &&
                (KindOfRef      >> kShiftDataTypeToDestrIndex == 5),
                "lookup of destructors depends on KindOf* values");
  auto index = v.makeReg();
  v << shrli{kShiftDataTypeToDestrIndex, type, index, v.makeReg()};
  return baseless(index * 8 + safe_cast<int>(table));
}
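
Tying the pieces together: the static_assert pins the DataType-to-slot mapping, and the index * 8 scale says each slot holds one 8-byte function pointer, so the baseless addressing resolves as in this sketch:

// For a runtime value whose type register holds KindOfArray:
//   index = KindOfArray >> kShiftDataTypeToDestrIndex   // == 2, per the
//                                                       // static_assert
//   addr  = g_destructors + index * 8                   // third 8-byte slot
// The deltaFits check above guarantees the table address fits the 32-bit
// displacement field of that addressing mode.
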
Example #20
Type refineType(Type oldType, Type newType) {
  // It's OK for the old and new inner types of boxed values not to
  // intersect, since the inner type is really just a prediction.
  // But if they do intersect, we keep the intersection.  This is
  // necessary to keep the type known in situations like:
  //   oldType: Boxed{Obj}
  //   newType: Boxed{Obj<C>, InitNull}
  if (oldType.isBoxed() && newType.isBoxed() && oldType.not(newType)) {
    return oldType < newType ? oldType : newType;
  }

  auto const result = oldType & newType;
  always_assert_flog(result != Type::Bottom,
                     "refineType({}, {}) failed", oldType, newType);
  return result;
}
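
A worked instance of the comment's own example, written out as a sketch:

// oldType = Boxed{Obj}, newType = Boxed{Obj<C>, InitNull}
// The inner types intersect (Obj & (Obj<C>|InitNull) = Obj<C>), so the
// boxed early-return doesn't fire and we fall through to oldType & newType,
// keeping Boxed{Obj<C>} rather than discarding the refined inner type.
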
Example #21
/*
 * relaxConstraint returns the least specific TypeConstraint 'tc' that doesn't
 * prevent the intersection of knownType and relaxType(toRelax, tc) from
 * satisfying origTc. It is used in IRBuilder::constrainValue and
 * IRBuilder::constrainStack to determine how to constrain the typeParam and
 * src values of CheckType/CheckStk instructions, and the src values of
 * AssertType/AssertStk instructions.
 *
 * AssertType example:
 * t24:Obj<C> = AssertType<{Obj<C>|InitNull}> t4:Obj
 *
 * If constrainValue is called with (t24, DataTypeSpecialized), relaxConstraint
 * will be called with (DataTypeSpecialized, Obj<C>|InitNull, Obj). After a few
 * iterations it will determine that constraining Obj with DataTypeCountness
 * will still allow the result type of the AssertType instruction to satisfy
 * DataTypeSpecialized, because relaxType(Obj, DataTypeCountness) == Obj.
 */
TypeConstraint relaxConstraint(const TypeConstraint origTc,
                               const Type knownType, const Type toRelax) {
  ITRACE(4, "relaxConstraint({}, knownType = {}, toRelax = {})\n",
         origTc, knownType, toRelax);
  Trace::Indent _i;

  auto const dstType = refineType(knownType, toRelax);
  always_assert_flog(typeFitsConstraint(dstType, origTc),
                     "refine({}, {}) doesn't fit {}",
                     knownType, toRelax, origTc);

  // Preserve origTc's weak property.
  TypeConstraint newTc{DataTypeGeneric, DataTypeGeneric};
  newTc.weak = origTc.weak;

  while (true) {
    if (newTc.isSpecialized()) {
      // We need to ask for the right kind of specialization, so grab it from
      // origTc.
      if (origTc.wantArrayKind()) newTc.setWantArrayKind();
      if (origTc.wantClass()) newTc.setDesiredClass(origTc.desiredClass());
    }

    auto const relaxed = relaxType(toRelax, newTc);
    auto const newDstType = refineType(relaxed, knownType);
    if (typeFitsConstraint(newDstType, origTc)) break;

    ITRACE(5, "newDstType = {}, newTc = {}; ", newDstType, newTc);
    if (newTc.category == DataTypeGeneric ||
        !typeFitsOuterConstraint(newDstType, origTc)) {
      FTRACE(5, "incrementing outer\n");
      incCategory(newTc.category);
    } else if (!typeFitsInnerConstraint(newDstType, origTc)) {
      FTRACE(5, "incrementing inner\n");
      incCategory(newTc.innerCat);
    } else {
      not_reached();
    }
  }

  ITRACE(4, "Returning {}\n", newTc);
  // newTc shouldn't be any more specific than origTc.
  always_assert(newTc.category <= origTc.category &&
                newTc.innerCat <= origTc.innerCat);
  return newTc;
}
Example #22
/*
 * Modify a GeneralEffects to take potential VM re-entry into account.  This
 * affects may-load, may-store, and kills information for the instruction.  The
 * GeneralEffects should already contain AHeapAny in both loads and stores if
 * it affects those locations for reasons other than re-entry, but does not
 * need to if it doesn't.
 *
 * For loads, we need to take into account EnableArgsInBacktraces: if this flag
 * is on, any instruction that could re-enter could call debug_backtrace, which
 * could read the argument locals of any activation record in the callstack.
 * We don't try to limit the load effects to argument locals here, though, and
 * just union in all the locals.
 *
 * For kills, locations on the eval stack below the re-entry depth should all
 * be added.
 *
 * Important note: because of the `kills' set modifications, an instruction may
 * not report that it can re-enter if it actually can't.  The reason this can
 * go wrong is that if the instruction was in an inlined function, if we've
 * removed the DefInlineFP its spOff will not be meaningful (unless it's a
 * DecRef instruction, which we explicitly adjust in dce.cpp).  In this case
 * the `kills' set will refer to the wrong stack locations.  In general this
 * means instructions that can re-enter must have catch traces---but a few
 * other instructions are exceptions, either because they are not allowed in
 * inlined functions or because they take the (possibly-inlined) FramePtr as a
 * source.
 */
GeneralEffects may_reenter(const IRInstruction& inst, GeneralEffects x) {
  auto const may_reenter_is_ok =
    (inst.taken() && inst.taken()->isCatch()) ||
    inst.is(DecRef,
            ReleaseVVAndSkip,
            CIterFree,
            MIterFree,
            MIterNext,
            MIterNextK,
            IterFree,
            ABCUnblock,
            GenericRetDecRefs);
  always_assert_flog(
    may_reenter_is_ok,
    "instruction {} claimed may_reenter, but it isn't allowed to say that",
    inst
  );

  /*
   * We want to union `killed_stack' into whatever else the instruction already
   * said it must kill, but if we end up with an unrepresentable AliasClass we
   * can't return a set that's too big (the `kills' set is unlike the other
   * AliasClasses in GeneralEffects in that means it kills /everything/ in the
   * set, since it's must-information).
   *
   * If we can't represent the union, just take the stack, in part because we
   * have some debugging asserts about this right now---but also nothing
   * actually uses may_reenter with a non-AEmpty kills at the time of this
   * writing anyway.
   */
  auto const killed_stack =
    stack_below(inst.marker().fp(), -inst.marker().spOff().offset - 1);
  auto const kills_union = x.kills.precise_union(killed_stack);
  auto const new_kills = kills_union ? *kills_union : killed_stack;

  return GeneralEffects {
    x.loads | AHeapAny
            | (RuntimeOption::EnableArgsInBacktraces ? AFrameAny : AEmpty),
    x.stores | AHeapAny,
    x.moves,
    new_kills
  };
}
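
A compressed restatement of the kills computation above, since must-information behaves unlike the may-sets (a sketch, not additional semantics):

// killed_stack = all eval-stack slots below the re-entry depth
// if (x.kills U killed_stack) is representable as one AliasClass:
//   new_kills = the precise union
// else:
//   new_kills = killed_stack alone   // dropping must-kill facts is safe;
//                                    // over-approximating them is not
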
Example #23
DataType Type::toDataType() const {
  assertx(!maybe(TPtrToGen) || m_bits == kBottom);
  assertx(isKnownDataType());

  // Order is important here: types must progress from more specific
  // to less specific to return the most specific DataType.
  if (*this <= TUninit)      return KindOfUninit;
  if (*this <= TInitNull)    return KindOfNull;
  if (*this <= TBool)        return KindOfBoolean;
  if (*this <= TInt)         return KindOfInt64;
  if (*this <= TDbl)         return KindOfDouble;
  if (*this <= TStaticStr)   return KindOfStaticString;
  if (*this <= TStr)         return KindOfString;
  if (*this <= TArr)         return KindOfArray;
  if (*this <= TObj)         return KindOfObject;
  if (*this <= TRes)         return KindOfResource;
  if (*this <= TBoxedCell)   return KindOfRef;
  if (*this <= TCls)         return KindOfClass;
  always_assert_flog(false,
                     "Bad Type {} in Type::toDataType()", *this);
}
Example #24
DataType Type::toDataType() const {
  assert(!isPtr());
  assert(isKnownDataType());

  // Order is important here: types must progress from more specific
  // to less specific to return the most specific DataType.
  if (subtypeOf(Uninit))        return KindOfUninit;
  if (subtypeOf(InitNull))      return KindOfNull;
  if (subtypeOf(Bool))          return KindOfBoolean;
  if (subtypeOf(Int))           return KindOfInt64;
  if (subtypeOf(Dbl))           return KindOfDouble;
  if (subtypeOf(StaticStr))     return KindOfStaticString;
  if (subtypeOf(Str))           return KindOfString;
  if (subtypeOf(Arr))           return KindOfArray;
  if (subtypeOf(Obj))           return KindOfObject;
  if (subtypeOf(Res))           return KindOfResource;
  if (subtypeOf(BoxedCell))     return KindOfRef;
  if (subtypeOf(Cls))           return KindOfClass;
  always_assert_flog(false,
                     "Bad Type {} in Type::toDataType()", *this);
}
Example #25
Block* findDefiningBlock(const SSATmp* t, const IdomVector& idoms) {
  assertx(!t->inst()->is(DefConst));
  auto const srcInst = t->inst();

  if (srcInst->hasEdges()) {
    auto const next = srcInst->next();
    UNUSED auto const taken = srcInst->taken();
    always_assert_flog(
      next && taken,
      "hasEdges instruction defining a dst had no edges:\n  {}\n",
      srcInst->toString()
    );
    for (const auto& arc : next->preds()) {
      auto pred = arc.from();
      if (pred != srcInst->block() && !dominates(next, pred, idoms)) {
        return nullptr;
      }
    }
    return next;
  }

  return srcInst->block();
}
Example #26
void Extension::CompileSystemlib(const std::string &slib,
                                 const std::string &name) {
  // TODO (t3443556) Bytecode repo compilation expects that any errors
  // encountered during systemlib compilation have valid filename pointers
  // which won't be the case for now unless these pointers are long-lived.
  auto const moduleName = makeStaticString(name.c_str());
  auto const unit = compile_systemlib_string(slib.c_str(), slib.size(),
                                             moduleName->data());
  always_assert_flog(unit, "No unit created for systemlib `{}'", name);

  const StringData* msg;
  int line;
  if (unit->compileTimeFatal(msg, line) ||
      unit->parseFatal(msg, line)) {
    std::fprintf(stderr, "Systemlib `%s' contains a fataling unit: %s, %d\n",
                 name.c_str(),
                 msg->data(),
                 line);
    _Exit(0);
  }

  unit->merge();
  s_systemlib_units.push_back(unit);
}
Example #27
bool dontGuardAnyInputs(Op op) {
  switch (op) {
  case Op::IterBreak:
  case Op::DecodeCufIter:
  case Op::IterNext:
  case Op::IterNextK:
  case Op::WIterInit:
  case Op::WIterInitK:
  case Op::WIterNext:
  case Op::WIterNextK:
  case Op::MIterInit:
  case Op::MIterInitK:
  case Op::MIterNext:
  case Op::MIterNextK:
  case Op::IterInitK:
  case Op::IterInit:
  case Op::JmpZ:
  case Op::JmpNZ:
  case Op::Jmp:
  case Op::JmpNS:
  case Op::FCallArray:
  case Op::FCall:
  case Op::FCallD:
  case Op::FCallAwait:
  case Op::ClsCnsD:
  case Op::FPassCW:
  case Op::FPassCE:
  case Op::FPassR:
  case Op::FPassV:
  case Op::FPassG:
  case Op::FPassL:
  case Op::FPassS:
  case Op::FCallBuiltin:
  case Op::NewStructArray:
  case Op::Switch:
  case Op::SSwitch:
  case Op::Lt:
  case Op::Lte:
  case Op::Gt:
  case Op::Gte:
  case Op::Cmp:
  case Op::SetOpL:
  case Op::InitProp:
  case Op::BreakTraceHint:
  case Op::IsTypeL:
  case Op::IsTypeC:
  case Op::IncDecL:
  case Op::DefCls:
  case Op::FPushCuf:
  case Op::FPushCufF:
  case Op::FPushCufSafe:
  case Op::IncStat:
  case Op::Eq:
  case Op::Neq:
  case Op::AssertRATL:
  case Op::AssertRATStk:
  case Op::SetL:
  case Op::BindL:
  case Op::EmptyL:
  case Op::CastBool:
  case Op::Same:
  case Op::NSame:
  case Op::Yield:
  case Op::YieldK:
  case Op::ContEnter:
  case Op::ContRaise:
  case Op::CreateCont:
  case Op::Await:
  case Op::BitAnd:
  case Op::BitOr:
  case Op::BitXor:
  case Op::Sub:
  case Op::Mul:
  case Op::SubO:
  case Op::MulO:
  case Op::Add:
  case Op::AddO:
  case Op::AGetC:
  case Op::AGetL:
  case Op::AKExists:
  case Op::AddElemC:
  case Op::AddNewElemC:
  case Op::Array:
  case Op::ArrayIdx:
  case Op::BareThis:
  case Op::BindG:
  case Op::BindS:
  case Op::BitNot:
  case Op::CGetG:
  case Op::CGetQuietG:
  case Op::CGetL:
  case Op::CGetQuietL:
  case Op::CGetL2:
  case Op::CGetS:
  case Op::CUGetL:
  case Op::CIterFree:
  case Op::CastArray:
  case Op::CastDouble:
  case Op::CastInt:
  case Op::CastObject:
  case Op::CastString:
  case Op::CheckProp:
  case Op::CheckThis:
  case Op::Clone:
  case Op::Cns:
  case Op::CnsE:
  case Op::CnsU:
  case Op::MapAddElemC:
  case Op::ColAddNewElemC:
  case Op::ColFromArray:
  case Op::ConcatN:
  case Op::Concat:
  case Op::ContCheck:
  case Op::ContCurrent:
  case Op::ContKey:
  case Op::ContValid:
  case Op::ContStarted:
  case Op::ContGetReturn:
  case Op::CreateCl:
  case Op::DefCns:
  case Op::DefFunc:
  case Op::Dir:
  case Op::Div:
  case Op::Double:
  case Op::Dup:
  case Op::EmptyG:
  case Op::EmptyS:
  case Op::FPushClsMethodD:
  case Op::FPushClsMethod:
  case Op::FPushClsMethodF:
  case Op::FPushCtor:
  case Op::FPushCtorD:
  case Op::FPushCufIter:
  case Op::FPushFunc:
  case Op::FPushFuncD:
  case Op::FPushFuncU:
  case Op::FPushObjMethodD:
  case Op::False:
  case Op::File:
  case Op::GetMemoKey:
  case Op::Idx:
  case Op::InitThisLoc:
  case Op::InstanceOf:
  case Op::InstanceOfD:
  case Op::Int:
  case Op::IssetG:
  case Op::IssetL:
  case Op::IssetS:
  case Op::IterFree:
  case Op::LateBoundCls:
  case Op::MIterFree:
  case Op::Mod:
  case Op::Pow:
  case Op::NameA:
  case Op::NativeImpl:
  case Op::NewArray:
  case Op::NewCol:
  case Op::NewLikeArrayL:
  case Op::NewMixedArray:
  case Op::NewDictArray:
  case Op::NewPackedArray:
  case Op::NewVecArray:
  case Op::Not:
  case Op::Null:
  case Op::NullUninit:
  case Op::OODeclExists:
  case Op::Parent:
  case Op::PopA:
  case Op::PopC:
  case Op::PopR:
  case Op::PopV:
  case Op::Print:
  case Op::PushL:
  case Op::RetC:
  case Op::RetV:
  case Op::Self:
  case Op::SetG:
  case Op::SetS:
  case Op::Shl:
  case Op::Shr:
  case Op::Silence:
  case Op::StaticLoc:
  case Op::StaticLocInit:
  case Op::String:
  case Op::This:
  case Op::True:
  case Op::Unbox:
  case Op::UnboxR:
  case Op::UnsetL:
  case Op::VGetG:
  case Op::VGetL:
  case Op::VGetS:
  case Op::VerifyParamType:
  case Op::VerifyRetTypeC:
  case Op::VerifyRetTypeV:
  case Op::WHResult:
  case Op::Xor:
  case Op::BaseNC:
  case Op::BaseNL:
  case Op::BaseGC:
  case Op::BaseGL:
  case Op::FPassBaseNC:
  case Op::FPassBaseNL:
  case Op::FPassBaseGC:
  case Op::FPassBaseGL:
  case Op::BaseSC:
  case Op::BaseSL:
  case Op::BaseL:
  case Op::FPassBaseL:
  case Op::BaseC:
  case Op::BaseR:
  case Op::BaseH:
  case Op::Dim:
  case Op::FPassDim:
  case Op::QueryM:
  case Op::VGetM:
  case Op::FPassM:
  case Op::SetM:
  case Op::IncDecM:
  case Op::SetOpM:
  case Op::BindM:
  case Op::UnsetM:
  case Op::SetWithRefLML:
  case Op::SetWithRefRML:
    return false;

  // These are instructions that are always interp-one'd, or are always no-ops.
  case Op::LowInvalid:
  case Op::Nop:
  case Op::Box:
  case Op::BoxR:
  case Op::BoxRNop:
  case Op::UnboxRNop:
  case Op::RGetCNop:
  case Op::AddElemV:
  case Op::AddNewElemV:
  case Op::ClsCns:
  case Op::Exit:
  case Op::Fatal:
  case Op::Unwind:
  case Op::Throw:
  case Op::CGetL3:
  case Op::CGetN:
  case Op::CGetQuietN:
  case Op::VGetN:
  case Op::IssetN:
  case Op::EmptyN:
  case Op::SetN:
  case Op::SetOpN:
  case Op::SetOpG:
  case Op::SetOpS:
  case Op::IncDecN:
  case Op::IncDecG:
  case Op::IncDecS:
  case Op::BindN:
  case Op::UnsetN:
  case Op::UnsetG:
  case Op::FPushObjMethod:
  case Op::FPassC:
  case Op::FPassVNop:
  case Op::FPassN:
  case Op::FCallUnpack:
  case Op::CufSafeArray:
  case Op::CufSafeReturn:
  case Op::Incl:
  case Op::InclOnce:
  case Op::Req:
  case Op::ReqOnce:
  case Op::ReqDoc:
  case Op::Eval:
  case Op::DefClsNop:
  case Op::DefTypeAlias:
  case Op::Catch:
  case Op::HighInvalid:
  case Op::ContAssignDelegate:
  case Op::ContEnterDelegate:
  case Op::YieldFromDelegate:
  case Op::ContUnsetDelegate:
    return true;
  }

  always_assert_flog(0, "invalid opcode {}\n", static_cast<uint32_t>(op));
}
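
A note on the shape of this switch: it has no default label on purpose, so compilers that warn on non-exhaustive enum switches flag any newly added Op at build time, while the trailing assert catches values that were never valid Ops at run time. A minimal sketch of the same pattern with an illustrative enum:

enum class Op2 { Add, Sub };  // illustrative stand-in, not an HHVM type

bool isAdditive(Op2 op) {
  switch (op) {
  case Op2::Add: return true;
  case Op2::Sub: return false;
  }
  // No default: -Wswitch (or -Werror=switch) forces this function to be
  // updated whenever Op2 grows; the assert handles corrupt values at runtime.
  always_assert_flog(false, "invalid opcode {}", static_cast<uint32_t>(op));
}
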
Example #28
/*
 * Attempts to begin inlining, and returns whether or not it succeeded.
 *
 * When doing gen-time inlining, we set up a series of IR instructions
 * that looks like this:
 *
 *   fp0  = DefFP
 *   sp   = DefSP<offset>
 *
 *   // ... normal stuff happens ...
 *
 *   // FPI region:
 *     SpillFrame sp, ...
 *     // ... probably some StStks due to argument expressions
 *     fp2   = DefInlineFP<func,retBC,retSP,off> sp
 *
 *         // ... callee body ...
 *
 *     InlineReturn fp2
 *
 * In DCE we attempt to remove the InlineReturn and DefInlineFP instructions if
 * they aren't needed.
 */
bool beginInlining(IRGS& env,
                   unsigned numParams,
                   const Func* target,
                   Offset returnBcOffset) {
  auto const& fpiStack = env.irb->fpiStack();

  assertx(!fpiStack.empty() &&
    "Inlining does not support calls with the FPush* in a different Tracelet");
  assertx(returnBcOffset >= 0 && "returnBcOffset before beginning of caller");
  assertx(curFunc(env)->base() + returnBcOffset < curFunc(env)->past() &&
         "returnBcOffset past end of caller");

  FTRACE(1, "[[[ begin inlining: {}\n", target->fullName()->data());

  SSATmp** params = (SSATmp**)alloca(sizeof(SSATmp*) * numParams);
  for (unsigned i = 0; i < numParams; ++i) {
    params[numParams - i - 1] = popF(env);
  }

  auto const prevSP    = fpiStack.front().returnSP;
  auto const prevSPOff = fpiStack.front().returnSPOff;
  spillStack(env);
  auto const calleeSP  = sp(env);

  always_assert_flog(
    prevSP == calleeSP,
    "FPI stack pointer and callee stack pointer didn't match in beginInlining"
  );

  auto const& info = fpiStack.front();
  always_assert(!isFPushCuf(info.fpushOpc) && !info.interp);

  auto ctx = [&] {
    if (info.ctx || isFPushFunc(info.fpushOpc)) {
      return info.ctx;
    }

    constexpr int32_t adjust = offsetof(ActRec, m_r) - offsetof(ActRec, m_this);
    IRSPOffset ctxOff{invSPOff(env) - info.returnSPOff - adjust};
    return gen(env, LdStk, TCtx, IRSPOffsetData{ctxOff}, sp(env));
  }();

  DefInlineFPData data;
  data.target        = target;
  data.retBCOff      = returnBcOffset;
  data.fromFPushCtor = isFPushCtor(info.fpushOpc);
  data.ctx           = ctx;
  data.retSPOff      = prevSPOff;
  data.spOffset      = offsetFromIRSP(env, BCSPOffset{0});

  // Push state and update the marker before emitting any instructions so
  // they're all given markers in the callee.
  auto const key = SrcKey {
    target,
    target->getEntryForNumArgs(numParams),
    false
  };
  env.bcStateStack.emplace_back(key);
  env.inlineLevel++;
  updateMarker(env);

  auto const calleeFP = gen(env, DefInlineFP, data, calleeSP, fp(env));

  for (unsigned i = 0; i < numParams; ++i) {
    stLocRaw(env, i, calleeFP, params[i]);
  }
  const bool hasVariadicArg = target->hasVariadicCaptureParam();
  for (unsigned i = numParams; i < target->numLocals() - hasVariadicArg; ++i) {
    /*
     * Here we need to be generating hopefully-dead stores to initialize
     * non-parameter locals to KindOfUninit in case we have to leave the trace.
     */
    stLocRaw(env, i, calleeFP, cns(env, TUninit));
  }
  if (hasVariadicArg) {
    auto argNum = target->numLocals() - 1;
    always_assert(numParams <= argNum);
    stLocRaw(env, argNum, calleeFP, cns(env, staticEmptyArray()));
  }

  return true;
}
Example #29
void unknownBaseType(const TypedValue* tv) {
  always_assert_flog(
    false,
    "Unknown KindOf: {} in member operation base",
    static_cast<uint8_t>(tv->m_type));
}
Example #30
void Repo::initCentral() {
  std::string error;

  assert(m_dbc == nullptr);
  auto tryPath = [this, &error](const char* path) {
    std::string subErr;
    if (openCentral(path, subErr) == RepoStatus::error) {
      folly::format(&error, "  {}\n", subErr.empty() ? path : subErr);
      return false;
    }
    return true;
  };

  auto fail_no_repo = [&error] {
    error = "Failed to initialize central HHBC repository:\n" + error;
    // Database initialization failed; this is an unrecoverable state.
    Logger::Error("%s", error.c_str());

    if (Process::IsInMainThread()) {
      exit(1);
    }
    always_assert_flog(false, "{}", error);
  };

  // Try Repo.Central.Path
  if (!RuntimeOption::RepoCentralPath.empty() &&
      tryPath(RuntimeOption::RepoCentralPath.c_str())) {
    return;
  }

  // Try HHVM_REPO_CENTRAL_PATH
  const char* HHVM_REPO_CENTRAL_PATH = getenv("HHVM_REPO_CENTRAL_PATH");
  if (HHVM_REPO_CENTRAL_PATH != nullptr &&
      tryPath(HHVM_REPO_CENTRAL_PATH)) {
    return;
  }

  if (!RuntimeOption::RepoAllowFallbackPath) fail_no_repo();

  // Try "$HOME/.hhvm.hhbc".
  char* HOME = getenv("HOME");
  if (HOME != nullptr) {
    std::string centralPath = HOME;
    centralPath += "/.hhvm.hhbc";
    if (tryPath(centralPath.c_str())) {
      return;
    }
  }

#ifndef _WIN32
  // Try the equivalent of "$HOME/.hhvm.hhbc", but look up the home directory
  // in the password database.
  {
    passwd pwbuf;
    passwd* pwbufp;
    long bufsize = sysconf(_SC_GETPW_R_SIZE_MAX);
    if (bufsize != -1) {
      auto buf = new char[bufsize];
      SCOPE_EXIT { delete[] buf; };
      if (!getpwuid_r(getuid(), &pwbuf, buf, size_t(bufsize), &pwbufp)
          && (HOME == nullptr || strcmp(HOME, pwbufp->pw_dir))) {
        std::string centralPath = pwbufp->pw_dir;
        centralPath += "/.hhvm.hhbc";
        if (tryPath(centralPath.c_str())) {
          return;
        }
      }
    }
  }