/*
 * Finish a traced BIF call whose completion was deferred through a
 * NIF export: replay the saved trace state via erts_bif_trace_epilogue()
 * and then release that state.
 *
 * Ownership: consumes nep->trace — the NifExportTrace record is freed
 * here and nep->trace is reset to NULL.  The stored meta tracer is
 * cleared with erts_tracer_update() (which presumably drops the
 * reference taken in erts_nif_export_save_trace() — see that function).
 */
void
erts_nif_export_restore_trace(Process *c_p, Eterm result, NifExport *nep)
{
    NifExportTrace *trace_state = nep->trace;

    nep->trace = NULL;

    erts_bif_trace_epilogue(c_p, result,
                            trace_state->applying,
                            trace_state->ep,
                            trace_state->cp,
                            trace_state->flags,
                            trace_state->flags_meta,
                            trace_state->I,
                            trace_state->meta_tracer);

    /* Reset the stored tracer to NIL before the record is freed. */
    erts_tracer_update(&trace_state->meta_tracer, NIL);
    erts_free(ERTS_ALC_T_NIF_EXP_TRACE, trace_state);
}
/*
 * Stash the trace state of an in-progress traced BIF call in the NIF
 * export, so that the trace epilogue can be run later when the
 * rescheduled call completes (see erts_nif_export_restore_trace()).
 *
 * The record is allocated from ERTS_ALC_T_NIF_EXP_TRACE and owned by
 * nep->trace until restored/freed by erts_nif_export_restore_trace().
 */
void
erts_nif_export_save_trace(Process *c_p, NifExport *nep, int applying,
                           Export* ep, BeamInstr *cp, Uint32 flags,
                           Uint32 flags_meta, BeamInstr* I,
                           ErtsTracer meta_tracer)
{
    NifExportTrace *trace_state;

    ASSERT(nep && nep->argc >= 0);
    ASSERT(!nep->trace); /* only one saved trace state may be pending */

    trace_state = erts_alloc(ERTS_ALC_T_NIF_EXP_TRACE,
                             sizeof(NifExportTrace));
    trace_state->applying   = applying;
    trace_state->ep         = ep;
    trace_state->cp         = cp;
    trace_state->flags      = flags;
    trace_state->flags_meta = flags_meta;
    trace_state->I          = I;

    /* Initialize to NIL first, then install via erts_tracer_update()
     * so any tracer bookkeeping is done by the tracer API itself. */
    trace_state->meta_tracer = NIL;
    erts_tracer_update(&trace_state->meta_tracer, meta_tracer);

    nep->trace = trace_state;
}
/*
 * NOTE(review): the function below appears to be an OLDER copy of
 * erts_bif_trace() than the one that follows it in this file.  It uses
 * the old export-entry layout (`&(ep->code[3])`,
 * `ep->fake_op_func_info_for_hipe[1]`, `ep->code` passed to the trace
 * calls), whereas the later copy uses `ep->beam` and `ep->info`.
 *
 * NOTE(review): this chunk is TRUNCATED — it ends inside the
 * `if (is_non_value(result))` error-handling branch and the function
 * body never closes, so it cannot compile as-is.  The remainder
 * (exception propagation / return-trace emission and the final return)
 * is missing from this view; do not modify without recovering the tail.
 */
/* * Entry point called by the trace wrap functions in erl_bif_wrap.c * * The trace wrap functions are themselves called through the export * entries instead of the original BIF functions. */ Eterm erts_bif_trace(int bif_index, Process* p, Eterm* args, BeamInstr* I) { Eterm result; Eterm (*func)(Process*, Eterm*, BeamInstr*); Export* ep = bif_export[bif_index]; Uint32 flags = 0, flags_meta = 0; ErtsTracer meta_tracer = erts_tracer_nil; int applying = (I == &(ep->code[3])); /* Yup, the apply code for a bif * is actually in the * export entry */ BeamInstr *cp = p->cp; GenericBp* g; GenericBpData* bp = NULL; Uint bp_flags = 0; ERTS_SMP_CHK_HAVE_ONLY_MAIN_PROC_LOCK(p); g = (GenericBp *) ep->fake_op_func_info_for_hipe[1]; if (g) { bp = &g->data[erts_active_bp_ix()]; bp_flags = bp->flags; } /* * Make continuation pointer OK, it is not during direct BIF calls, * but it is correct during apply of bif. */ if (!applying) { p->cp = I; } if (bp_flags & (ERTS_BPF_LOCAL_TRACE|ERTS_BPF_GLOBAL_TRACE) && IS_TRACED_FL(p, F_TRACE_CALLS)) { int local = !!(bp_flags & ERTS_BPF_LOCAL_TRACE); flags = erts_call_trace(p, ep->code, bp->local_ms, args, local, &ERTS_TRACER(p)); } if (bp_flags & ERTS_BPF_META_TRACE) { ErtsTracer old_tracer; meta_tracer = erts_smp_atomic_read_nob(&bp->meta_tracer->tracer); old_tracer = meta_tracer; flags_meta = erts_call_trace(p, ep->code, bp->meta_ms, args, 0, &meta_tracer); if (!ERTS_TRACER_COMPARE(old_tracer, meta_tracer)) { ErtsTracer new_tracer = erts_tracer_nil; erts_tracer_update(&new_tracer, meta_tracer); if (old_tracer == erts_smp_atomic_cmpxchg_acqb( &bp->meta_tracer->tracer, (erts_aint_t)new_tracer, (erts_aint_t)old_tracer)) { ERTS_TRACER_CLEAR(&old_tracer); } else { ERTS_TRACER_CLEAR(&new_tracer); } } } if (bp_flags & ERTS_BPF_TIME_TRACE_ACTIVE && IS_TRACED_FL(p, F_TRACE_CALLS)) { BeamInstr *pc = (BeamInstr *)ep->code+3; erts_trace_time_call(p, pc, bp->time); } /* Restore original continuation pointer (if changed). 
*/ p->cp = cp; func = bif_table[bif_index].f; result = func(p, args, I); if (applying && (flags & MATCH_SET_RETURN_TO_TRACE)) { BeamInstr i_return_trace = beam_return_trace[0]; BeamInstr i_return_to_trace = beam_return_to_trace[0]; BeamInstr i_return_time_trace = beam_return_time_trace[0]; Eterm *cpp; /* Maybe advance cp to skip trace stack frames */ for (cpp = p->stop; ; cp = cp_val(*cpp++)) { if (*cp == i_return_trace) { /* Skip stack frame variables */ while (is_not_CP(*cpp)) cpp++; cpp += 2; /* Skip return_trace parameters */ } else if (*cp == i_return_time_trace) { /* Skip stack frame variables */ while (is_not_CP(*cpp)) cpp++; cpp += 1; /* Skip return_time_trace parameters */ } else if (*cp == i_return_to_trace) { /* A return_to trace message is going to be generated * by normal means, so we do not have to. */ cp = NULL; break; } else break; } } /* Try to get these in the order * they usually appear in normal code... */ if (is_non_value(result)) { Uint reason = p->freason; if (reason != TRAP) { Eterm class; Eterm value = p->fvalue; /* Expand error value like in handle_error() */ if (reason & EXF_ARGLIST) { Eterm *tp; ASSERT(is_tuple(value)); tp = tuple_val(value); value = tp[1]; } if ((reason & EXF_THROWN) && (p->catches <= 0)) { Eterm *hp = HAlloc(p, 3); value = TUPLE2(hp, am_nocatch, value); reason = EXC_ERROR; } /* Note: expand_error_value() could theoretically * allocate on the heap, but not for any error * returned by a BIF, and it would do no harm, * just be annoying. */ value = expand_error_value(p, reason, value); class = exception_tag[GET_EXC_CLASS(reason)]; if (flags_meta & MATCH_SET_EXCEPTION_TRACE) { erts_trace_exception(p, ep->code, class, value, &meta_tracer); } if (flags & MATCH_SET_EXCEPTION_TRACE) { erts_trace_exception(p, ep->code, class, value, &ERTS_TRACER(p)); }
/* NOTE(review): chunk ends here mid-function — the remainder of the
 * error-handling path and the function's closing brace are missing. */
/*
 * Entry point called by the trace wrap functions in erl_bif_wrap.c
 *
 * The trace wrap functions are themselves called through the export
 * entries instead of the original BIF functions.
 *
 * Runs the call-trace / meta-trace / time-trace prologue for the BIF
 * selected by bif_index, invokes the real BIF, and then either defers
 * the trace epilogue (when the call was rescheduled via a NIF export)
 * or runs erts_bif_trace_epilogue() immediately.
 */
Eterm
erts_bif_trace(int bif_index, Process* p, Eterm* args, BeamInstr* I)
{
    Eterm result;
    Eterm (*func)(Process*, Eterm*, BeamInstr*);
    Export* ep = bif_export[bif_index];
    Uint32 flags = 0, flags_meta = 0;
    ErtsTracer meta_tracer = erts_tracer_nil;
    int applying = (I == ep->beam); /* Yup, the apply code for a bif
                                     * is actually in the
                                     * export entry */
    BeamInstr *cp = p->cp;          /* saved so it can be restored after
                                     * the prologue below */
    GenericBp* g;
    GenericBpData* bp = NULL;
    Uint bp_flags = 0;

    ERTS_SMP_CHK_HAVE_ONLY_MAIN_PROC_LOCK(p);

    /* Pick up breakpoint data (if any) for the currently active
     * breakpoint index. */
    g = ep->info.u.gen_bp;
    if (g) {
        bp = &g->data[erts_active_bp_ix()];
        bp_flags = bp->flags;
    }

    /*
     * Make continuation pointer OK, it is not during direct BIF calls,
     * but it is correct during apply of bif.
     */
    if (!applying) {
        p->cp = I;
    }

    /* Ordinary (local/global) call trace. */
    if (bp_flags & (ERTS_BPF_LOCAL_TRACE|ERTS_BPF_GLOBAL_TRACE) &&
        IS_TRACED_FL(p, F_TRACE_CALLS)) {
        int local = !!(bp_flags & ERTS_BPF_LOCAL_TRACE);
        flags = erts_call_trace(p, &ep->info, bp->local_ms, args,
                                local, &ERTS_TRACER(p));
    }

    /* Meta trace: erts_call_trace() may replace meta_tracer; if so,
     * try to publish the new tracer into the breakpoint with a cmpxchg
     * and clear whichever tracer reference lost the race. */
    if (bp_flags & ERTS_BPF_META_TRACE) {
        ErtsTracer old_tracer;

        meta_tracer = erts_smp_atomic_read_nob(&bp->meta_tracer->tracer);
        old_tracer = meta_tracer;
        flags_meta = erts_call_trace(p, &ep->info, bp->meta_ms, args,
                                     0, &meta_tracer);

        if (!ERTS_TRACER_COMPARE(old_tracer, meta_tracer)) {
            ErtsTracer new_tracer = erts_tracer_nil;
            erts_tracer_update(&new_tracer, meta_tracer);
            if (old_tracer == erts_smp_atomic_cmpxchg_acqb(
                    &bp->meta_tracer->tracer,
                    (erts_aint_t)new_tracer,
                    (erts_aint_t)old_tracer)) {
                /* Swap succeeded: drop the tracer we replaced. */
                ERTS_TRACER_CLEAR(&old_tracer);
            } else {
                /* Someone else updated it first: drop our copy. */
                ERTS_TRACER_CLEAR(&new_tracer);
            }
        }
    }

    /* Call-time trace. */
    if (bp_flags & ERTS_BPF_TIME_TRACE_ACTIVE &&
        IS_TRACED_FL(p, F_TRACE_CALLS)) {
        erts_trace_time_call(p, &ep->info, bp->time);
    }

    /* Restore original continuation pointer (if changed). */
    p->cp = cp;

    func = bif_table[bif_index].f;

    result = func(p, args, I);

    if (erts_nif_export_check_save_trace(p, result,
                                         applying, ep,
                                         cp, flags,
                                         flags_meta, I,
                                         meta_tracer)) {
        /*
         * erts_bif_trace_epilogue() will be called
         * later when appropriate via the NIF export
         * scheduling functionality...
         */
        return result;
    }

    return erts_bif_trace_epilogue(p, result, applying, ep, cp,
                                   flags, flags_meta, I,
                                   meta_tracer);
}