void
erts_install_breakpoints(BpFunctions* f)
{
    Uint i;
    Uint n = f->matched;
    BeamInstr br = (BeamInstr) BeamOp(op_i_generic_breakpoint);

    for (i = 0; i < n; i++) {
        BeamInstr* pc = f->matching[i].pc;
        GenericBp* g = (GenericBp *) pc[-4];
        if (*pc != br && g) {
            Module* modp = f->matching[i].mod;

            /*
             * The breakpoint must be disabled in the active data
             * (it will be enabled later by switching bp indices),
             * and enabled in the staging data.
             */
            ASSERT(g->data[erts_active_bp_ix()].flags == 0);
            ASSERT(g->data[erts_staging_bp_ix()].flags != 0);

            /*
             * The following write is not protected by any lock. We
             * assume that the hardware guarantees that aligned
             * word-size (or half-word) writes are atomic (i.e. that
             * other processes executing this code will not see a
             * half-written pointer).
             */
            *pc = br;
            modp->curr.num_breakpoints++;
        }
    }
}
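
/*
 * [Illustration] The plain "*pc = br" store above relies on the
 * hardware making aligned word-size stores atomic. A minimal sketch
 * of the same single-word publication written with an explicit C11
 * relaxed atomic store, which records that assumption in the type
 * system, follows. The bp_demo_* name is hypothetical and not part
 * of the emulator; <stdatomic.h> is assumed to be available.
 */
#include <stdatomic.h>

static void bp_demo_publish_instr(_Atomic BeamInstr *pc, BeamInstr br)
{
    /*
     * A relaxed store suffices here: only this single word must be
     * seen whole by other schedulers; ordering against the breakpoint
     * data itself is handled by the staging/commit protocol.
     */
    atomic_store_explicit(pc, br, memory_order_relaxed);
}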
void
erts_commit_staged_bp(void)
{
    ErtsBpIndex staging = erts_staging_bp_ix();
    ErtsBpIndex active = erts_active_bp_ix();

    erts_smp_atomic32_set_nob(&erts_active_bp_index, staging);
    erts_smp_atomic32_set_nob(&erts_staging_bp_index, active);
}
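
/*
 * [Illustration] erts_commit_staged_bp() is an instance of double
 * buffering: writers prepare breakpoint data in the staging slot
 * while readers keep using the active slot, and a single atomic
 * index flip publishes the new data. A self-contained sketch of the
 * same pattern using C11 atomics follows; the bp_demo_* names are
 * hypothetical and not part of the emulator.
 */
static _Atomic unsigned bp_demo_active_ix;   /* always 0 or 1 */

static unsigned bp_demo_staging_ix(void)
{
    /* With two buffers, the staging index is always the other one. */
    return atomic_load(&bp_demo_active_ix) ^ 1;
}

static void bp_demo_commit(void)
{
    /* One atomic store switches readers over to the prepared buffer. */
    atomic_store(&bp_demo_active_ix, bp_demo_staging_ix());
}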
void
erts_clear_call_trace_bif(BeamInstr *pc, int local)
{
    GenericBp* g = (GenericBp *) pc[-4];

    if (g) {
        Uint flags = local ? ERTS_BPF_LOCAL_TRACE : ERTS_BPF_GLOBAL_TRACE;
        if (g->data[erts_staging_bp_ix()].flags & flags) {
            clear_function_break(pc, flags);
        }
    }
}
static void
consolidate_bp_data(Module* modp, BeamInstr* pc, int local)
{
    GenericBp* g = (GenericBp *) pc[-4];
    GenericBpData* src;
    GenericBpData* dst;
    Uint flags;

    if (g == 0) {
        return;
    }

    src = &g->data[erts_active_bp_ix()];
    dst = &g->data[erts_staging_bp_ix()];

    /*
     * The contents of the staging area may be out of date.
     * Decrement all reference counts.
     */
    flags = dst->flags;
    if (flags & (ERTS_BPF_LOCAL_TRACE|ERTS_BPF_GLOBAL_TRACE)) {
        MatchSetUnref(dst->local_ms);
    }
    if (flags & ERTS_BPF_META_TRACE) {
        bp_meta_unref(dst->meta_tracer);
        MatchSetUnref(dst->meta_ms);
    }
    if (flags & ERTS_BPF_COUNT) {
        bp_count_unref(dst->count);
    }
    if (flags & ERTS_BPF_TIME_TRACE) {
        bp_time_unref(dst->time);
    }

    /*
     * If all flags are zero, deallocate all breakpoint data.
     */
    flags = dst->flags = src->flags;
    if (flags == 0) {
        if (modp) {
            if (local) {
                modp->curr.num_breakpoints--;
            } else {
                modp->curr.num_traced_exports--;
            }
            ASSERT(modp->curr.num_breakpoints >= 0);
            ASSERT(modp->curr.num_traced_exports >= 0);
            ASSERT(*pc != (BeamInstr) BeamOp(op_i_generic_breakpoint));
        }
        pc[-4] = 0;
        Free(g);
        return;
    }

    /*
     * Copy the active data to the staging area (making it ready
     * for the next time it will be used).
     */
    if (flags & (ERTS_BPF_LOCAL_TRACE|ERTS_BPF_GLOBAL_TRACE)) {
        dst->local_ms = src->local_ms;
        MatchSetRef(dst->local_ms);
    }
    if (flags & ERTS_BPF_META_TRACE) {
        dst->meta_tracer = src->meta_tracer;
        erts_refc_inc(&dst->meta_tracer->refc, 1);
        dst->meta_ms = src->meta_ms;
        MatchSetRef(dst->meta_ms);
    }
    if (flags & ERTS_BPF_COUNT) {
        dst->count = src->count;
        erts_refc_inc(&dst->count->refc, 1);
    }
    if (flags & ERTS_BPF_TIME_TRACE) {
        dst->time = src->time;
        erts_refc_inc(&dst->time->refc, 1);
        ASSERT(dst->time->hash);
    }
}
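
/*
 * [Illustration] consolidate_bp_data() follows a drop-then-share
 * discipline: first release whatever the stale staging slot still
 * references, then alias the active data and take new references so
 * both slots share one copy. A reduced sketch of that discipline for
 * a single refcounted object follows; the bp_demo_* names are
 * hypothetical, while erts_refc_dectest()/erts_refc_inc()/Free() are
 * the same primitives used above.
 */
typedef struct {
    erts_refc_t refc;
    /* payload ... */
} BpDemoObj;

static void bp_demo_consolidate(BpDemoObj **dst, BpDemoObj *src)
{
    if (*dst && erts_refc_dectest(&(*dst)->refc, 0) == 0) {
        Free(*dst);                     /* last reference to the stale copy */
    }
    *dst = src;                         /* staging now aliases the active data */
    if (src) {
        erts_refc_inc(&src->refc, 1);   /* account for the new alias */
    }
}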