/*
 * Run the meta/local trace action installed on the BIF at pc, if any.
 *
 * On return *tracer_pid holds the (possibly updated) tracer, or NIL
 * when no breakpoint data is installed.  Returns the match-spec flags
 * produced by erts_call_trace(), or 0 when there is nothing to trace.
 *
 * SMP NOTE: Process p may have become exiting on return!
 */
Uint32
erts_bif_mtrace(Process *p, BeamInstr *pc, Eterm *args, int local,
                Eterm *tracer_pid)
{
    BpData **bds = (BpData **) (pc)[-4];
    BpDataTrace *bdt;
    Eterm old_tpid, new_tpid;
    Uint32 ret_flags;

    ASSERT(tracer_pid);

    /* No breakpoint data at this BIF: report no tracer, no flags. */
    if (!bds) {
        *tracer_pid = NIL;
        return 0;
    }

    /* Select this process' slot (indexed by bp_sched2ix_proc(p);
     * presumably a per-scheduler slot -- matches the other accessors
     * in this file). */
    bdt = (BpDataTrace *) bds[bp_sched2ix_proc(p)];

    /* Snapshot the current tracer pid under the breakpoint lock. */
    ErtsSmpBPLock(bdt);
    old_tpid = new_tpid = bdt->tracer_pid;
    ErtsSmpBPUnlock(bdt);

    /* erts_call_trace() may replace the tracer through new_tpid;
     * pc-3 points at the mfa of the function-info header. */
    ret_flags = erts_call_trace(p, pc-3/*mfa*/, bdt->match_spec, args,
                                local, &new_tpid);
    *tracer_pid = new_tpid;

    /* Only take the lock again if the tracer actually changed. */
    if (old_tpid != new_tpid) {
        ErtsSmpBPLock(bdt);
        bdt->tracer_pid = new_tpid;
        ErtsSmpBPUnlock(bdt);
    }
    return ret_flags;
}
/*
 * SMP NOTE: Process p may have become exiting on return!
 *
 * Executes the call-trace action of the breakpoint planted at pc and
 * returns the original (displaced) instruction so the emulator can
 * resume executing it.  The match-spec result flags are returned via
 * *ret_flags and the (possibly updated) tracer via *tracer_pid.
 */
BeamInstr
erts_trace_break(Process *p, BeamInstr *pc, Eterm *args,
                 Uint32 *ret_flags, Eterm *tracer_pid)
{
    Eterm tpid1, tpid2;
    /* Breakpoint data array is stashed in the word at pc[-4]. */
    BpData **bds = (BpData **) (pc)[-4];
    BpDataTrace *bdt = NULL;

    ASSERT(bds);
    /* pc must sit right after an i_func_info header. */
    ASSERT(pc[-5] == (BeamInstr) BeamOp(op_i_func_info_IaaI));
    /* Select this process' slot (bp_sched2ix_proc(p); presumably a
     * per-scheduler index -- confirm against bp_sched2ix_proc). */
    bdt = (BpDataTrace *) bds[bp_sched2ix_proc(p)];
    ASSERT(bdt);
    /* Advance to the next element; the slot is written back below,
     * so this looks like rotating a circular chain of breakpoint
     * data -- NOTE(review): verify against BpData's next linkage. */
    bdt = (BpDataTrace *) bdt->next;
    ASSERT(bdt);
    ASSERT(ret_flags);
    ASSERT(tracer_pid);

    /* Snapshot the tracer pid under the breakpoint lock; tpid1 keeps
     * the old value so we can detect a change afterwards. */
    ErtsSmpBPLock(bdt);
    tpid1 = tpid2 = bdt->tracer_pid;
    ErtsSmpBPUnlock(bdt);

    /* erts_call_trace() may replace the tracer through tpid2;
     * pc-3 points at the mfa of the function-info header. */
    *ret_flags = erts_call_trace(p, pc-3/*mfa*/, bdt->match_spec, args,
                                 1, &tpid2);
    *tracer_pid = tpid2;
    /* Write the new tracer back only if it actually changed. */
    if (tpid1 != tpid2) {
        ErtsSmpBPLock(bdt);
        bdt->tracer_pid = tpid2;
        ErtsSmpBPUnlock(bdt);
    }
    /* Store the advanced position back into the slot. */
    bds[bp_sched2ix_proc(p)] = (BpData *) bdt;
    /* Hand the displaced instruction back to the emulator loop. */
    return bdt->orig_instr;
}
/*
 * Run the meta/local trace action installed on the BIF at pc, if any.
 *
 * On return *tracer_pid holds the (possibly updated) tracer, or NIL
 * when no breakpoint data is installed.  Returns the match-spec flags
 * produced by erts_call_trace(), or 0 when there is nothing to trace.
 *
 * SMP NOTE: Process p may have become exiting on return!
 */
Uint32
erts_bif_mtrace(Process *p, Uint *pc, Eterm *args, int local,
                Eterm *tracer_pid)
{
    BpDataTrace *bdt = (BpDataTrace *) pc[-4];
    Eterm old_tpid, new_tpid;
    Uint32 ret_flags;

    ASSERT(tracer_pid);

    /* No breakpoint data at this BIF: report no tracer, no flags. */
    if (!bdt) {
        *tracer_pid = NIL;
        return 0;
    }

    /* Snapshot the current tracer pid under the breakpoint lock. */
    ErtsSmpBPLock(bdt);
    old_tpid = new_tpid = bdt->tracer_pid;
    ErtsSmpBPUnlock(bdt);

    /* erts_call_trace() may replace the tracer through new_tpid;
     * pc-3 points at the mfa of the function-info header. */
    ret_flags = erts_call_trace(p, pc-3/*mfa*/, bdt->match_spec, args,
                                local, &new_tpid);
    *tracer_pid = new_tpid;

    /* Only take the lock again if the tracer actually changed. */
    if (old_tpid != new_tpid) {
        ErtsSmpBPLock(bdt);
        bdt->tracer_pid = new_tpid;
        ErtsSmpBPUnlock(bdt);
    }
    return ret_flags;
}
/*
 * SMP NOTE: Process p may have become exiting on return!
 *
 * Executes the call-trace action of the breakpoint planted at pc and
 * returns the original (displaced) instruction so the emulator can
 * resume executing it.  The match-spec result flags are returned via
 * *ret_flags and the (possibly updated) tracer via *tracer_pid.
 */
Uint
erts_trace_break(Process *p, Uint *pc, Eterm *args,
                 Uint32 *ret_flags, Eterm *tracer_pid)
{
    Eterm tpid1, tpid2;
    /* Breakpoint data is stashed in the word at pc[-4]. */
    BpDataTrace *bdt = (BpDataTrace *) pc[-4];

    /* pc must sit right after an i_func_info header. */
    ASSERT(pc[-5] == (Uint) BeamOp(op_i_func_info_IaaI));
    ASSERT(bdt);
    /* Advance to the next element; pc[-4] is rewritten below, so this
     * looks like rotating a circular chain of breakpoint data --
     * NOTE(review): verify against BpDataTrace's next linkage. */
    bdt = (BpDataTrace *) bdt->next;
    ASSERT(bdt);
    ASSERT(ret_flags);
    ASSERT(tracer_pid);

    /* Snapshot the tracer pid under the breakpoint lock; tpid1 keeps
     * the old value so we can detect a change afterwards. */
    ErtsSmpBPLock(bdt);
    tpid1 = tpid2 = bdt->tracer_pid;
    ErtsSmpBPUnlock(bdt);

    /* erts_call_trace() may replace the tracer through tpid2;
     * pc-3 points at the mfa of the function-info header. */
    *ret_flags = erts_call_trace(p, pc-3/*mfa*/, bdt->match_spec, args,
                                 1, &tpid2);
    *tracer_pid = tpid2;
    /* Write the new tracer back only if it actually changed. */
    if (tpid1 != tpid2) {
        ErtsSmpBPLock(bdt);
        bdt->tracer_pid = tpid2;
        ErtsSmpBPUnlock(bdt);
    }
    /* Store the advanced position back into the instruction word. */
    pc[-4] = (Uint) bdt;
    /* Hand the displaced instruction back to the emulator loop. */
    return bdt->orig_instr;
}
/* * Entry point called by the trace wrap functions in erl_bif_wrap.c * * The trace wrap functions are themselves called through the export * entries instead of the original BIF functions. */ Eterm erts_bif_trace(int bif_index, Process* p, Eterm* args, BeamInstr* I) { Eterm result; Eterm (*func)(Process*, Eterm*, BeamInstr*); Export* ep = bif_export[bif_index]; Uint32 flags = 0, flags_meta = 0; ErtsTracer meta_tracer = erts_tracer_nil; int applying = (I == &(ep->code[3])); /* Yup, the apply code for a bif * is actually in the * export entry */ BeamInstr *cp = p->cp; GenericBp* g; GenericBpData* bp = NULL; Uint bp_flags = 0; ERTS_SMP_CHK_HAVE_ONLY_MAIN_PROC_LOCK(p); g = (GenericBp *) ep->fake_op_func_info_for_hipe[1]; if (g) { bp = &g->data[erts_active_bp_ix()]; bp_flags = bp->flags; } /* * Make continuation pointer OK, it is not during direct BIF calls, * but it is correct during apply of bif. */ if (!applying) { p->cp = I; } if (bp_flags & (ERTS_BPF_LOCAL_TRACE|ERTS_BPF_GLOBAL_TRACE) && IS_TRACED_FL(p, F_TRACE_CALLS)) { int local = !!(bp_flags & ERTS_BPF_LOCAL_TRACE); flags = erts_call_trace(p, ep->code, bp->local_ms, args, local, &ERTS_TRACER(p)); } if (bp_flags & ERTS_BPF_META_TRACE) { ErtsTracer old_tracer; meta_tracer = erts_smp_atomic_read_nob(&bp->meta_tracer->tracer); old_tracer = meta_tracer; flags_meta = erts_call_trace(p, ep->code, bp->meta_ms, args, 0, &meta_tracer); if (!ERTS_TRACER_COMPARE(old_tracer, meta_tracer)) { ErtsTracer new_tracer = erts_tracer_nil; erts_tracer_update(&new_tracer, meta_tracer); if (old_tracer == erts_smp_atomic_cmpxchg_acqb( &bp->meta_tracer->tracer, (erts_aint_t)new_tracer, (erts_aint_t)old_tracer)) { ERTS_TRACER_CLEAR(&old_tracer); } else { ERTS_TRACER_CLEAR(&new_tracer); } } } if (bp_flags & ERTS_BPF_TIME_TRACE_ACTIVE && IS_TRACED_FL(p, F_TRACE_CALLS)) { BeamInstr *pc = (BeamInstr *)ep->code+3; erts_trace_time_call(p, pc, bp->time); } /* Restore original continuation pointer (if changed). 
*/ p->cp = cp; func = bif_table[bif_index].f; result = func(p, args, I); if (applying && (flags & MATCH_SET_RETURN_TO_TRACE)) { BeamInstr i_return_trace = beam_return_trace[0]; BeamInstr i_return_to_trace = beam_return_to_trace[0]; BeamInstr i_return_time_trace = beam_return_time_trace[0]; Eterm *cpp; /* Maybe advance cp to skip trace stack frames */ for (cpp = p->stop; ; cp = cp_val(*cpp++)) { if (*cp == i_return_trace) { /* Skip stack frame variables */ while (is_not_CP(*cpp)) cpp++; cpp += 2; /* Skip return_trace parameters */ } else if (*cp == i_return_time_trace) { /* Skip stack frame variables */ while (is_not_CP(*cpp)) cpp++; cpp += 1; /* Skip return_time_trace parameters */ } else if (*cp == i_return_to_trace) { /* A return_to trace message is going to be generated * by normal means, so we do not have to. */ cp = NULL; break; } else break; } } /* Try to get these in the order * they usually appear in normal code... */ if (is_non_value(result)) { Uint reason = p->freason; if (reason != TRAP) { Eterm class; Eterm value = p->fvalue; /* Expand error value like in handle_error() */ if (reason & EXF_ARGLIST) { Eterm *tp; ASSERT(is_tuple(value)); tp = tuple_val(value); value = tp[1]; } if ((reason & EXF_THROWN) && (p->catches <= 0)) { Eterm *hp = HAlloc(p, 3); value = TUPLE2(hp, am_nocatch, value); reason = EXC_ERROR; } /* Note: expand_error_value() could theoretically * allocate on the heap, but not for any error * returned by a BIF, and it would do no harm, * just be annoying. */ value = expand_error_value(p, reason, value); class = exception_tag[GET_EXC_CLASS(reason)]; if (flags_meta & MATCH_SET_EXCEPTION_TRACE) { erts_trace_exception(p, ep->code, class, value, &meta_tracer); } if (flags & MATCH_SET_EXCEPTION_TRACE) { erts_trace_exception(p, ep->code, class, value, &ERTS_TRACER(p)); }
/*
 * Entry point called by the trace wrap functions in erl_bif_wrap.c
 *
 * The trace wrap functions are themselves called through the export
 * entries instead of the original BIF functions.
 *
 * Runs any local/global, meta and time trace actions installed on the
 * BIF's export entry, calls the real BIF, and finishes via
 * erts_bif_trace_epilogue() (possibly deferred through the NIF export
 * scheduling machinery).
 */
Eterm
erts_bif_trace(int bif_index, Process* p, Eterm* args, BeamInstr* I)
{
    Eterm result;
    Eterm (*func)(Process*, Eterm*, BeamInstr*);
    Export* ep = bif_export[bif_index];
    Uint32 flags = 0, flags_meta = 0;
    ErtsTracer meta_tracer = erts_tracer_nil;
    int applying = (I == ep->beam); /* Yup, the apply code for a bif
                                     * is actually in the
                                     * export entry */
    BeamInstr *cp = p->cp;          /* saved continuation pointer,
                                     * restored before calling the BIF */
    GenericBp* g;
    GenericBpData* bp = NULL;
    Uint bp_flags = 0;

    ERTS_SMP_CHK_HAVE_ONLY_MAIN_PROC_LOCK(p);

    /* Pick up breakpoint data (if any) from the export entry, for the
     * currently active breakpoint generation. */
    g = ep->info.u.gen_bp;
    if (g) {
        bp = &g->data[erts_active_bp_ix()];
        bp_flags = bp->flags;
    }

    /*
     * Make continuation pointer OK, it is not during direct BIF calls,
     * but it is correct during apply of bif.
     */
    if (!applying) {
        p->cp = I;
    }
    /* Ordinary call trace: only when this process has call tracing
     * enabled.  May update the process' own tracer in place. */
    if (bp_flags & (ERTS_BPF_LOCAL_TRACE|ERTS_BPF_GLOBAL_TRACE) &&
        IS_TRACED_FL(p, F_TRACE_CALLS)) {
        int local = !!(bp_flags & ERTS_BPF_LOCAL_TRACE);
        flags = erts_call_trace(p, &ep->info, bp->local_ms, args,
                                local, &ERTS_TRACER(p));
    }
    /* Meta trace: independent of the process' trace flags.  The meta
     * tracer lives in an atomic in the breakpoint data. */
    if (bp_flags & ERTS_BPF_META_TRACE) {
        ErtsTracer old_tracer;

        meta_tracer = erts_smp_atomic_read_nob(&bp->meta_tracer->tracer);
        old_tracer = meta_tracer;
        /* erts_call_trace() may replace the tracer via meta_tracer. */
        flags_meta = erts_call_trace(p, &ep->info, bp->meta_ms, args,
                                     0, &meta_tracer);

        if (!ERTS_TRACER_COMPARE(old_tracer, meta_tracer)) {
            ErtsTracer new_tracer = erts_tracer_nil;
            erts_tracer_update(&new_tracer, meta_tracer);
            /* Publish the new tracer with a compare-and-swap; exactly
             * one of the two tracer references is released, depending
             * on whether we won the race. */
            if (old_tracer == erts_smp_atomic_cmpxchg_acqb(
                    &bp->meta_tracer->tracer,
                    (erts_aint_t)new_tracer,
                    (erts_aint_t)old_tracer)) {
                ERTS_TRACER_CLEAR(&old_tracer);
            } else {
                ERTS_TRACER_CLEAR(&new_tracer);
            }
        }
    }
    /* Call-time accounting, again gated on the process' trace flags. */
    if (bp_flags & ERTS_BPF_TIME_TRACE_ACTIVE &&
        IS_TRACED_FL(p, F_TRACE_CALLS)) {
        erts_trace_time_call(p, &ep->info, bp->time);
    }

    /* Restore original continuation pointer (if changed). */
    p->cp = cp;

    func = bif_table[bif_index].f;

    result = func(p, args, I);

    /* NOTE(review): presumably this checks whether the BIF rescheduled
     * itself via the NIF export mechanism -- confirm against
     * erts_nif_export_check_save_trace(). */
    if (erts_nif_export_check_save_trace(p, result,
                                         applying, ep,
                                         cp, flags,
                                         flags_meta, I,
                                         meta_tracer)) {
        /*
         * erts_bif_trace_epilogue() will be called
         * later when appropriate via the NIF export
         * scheduling functionality...
         */
        return result;
    }

    /* Normal path: emit return/exception trace messages right away. */
    return erts_bif_trace_epilogue(p, result, applying, ep, cp,
                                   flags, flags_meta, I,
                                   meta_tracer);
}