/* Initialize an I/O queue to its empty state, pointing both the iovec
 * and binary cursors at the small inline buffers embedded in the queue. */
void erts_ioq_init(ErtsIOQueue *q, ErtsAlcType_t alct, int driver)
{
    /* Layout/size invariants the queue code relies on: the common header
     * must be shared between the driver and NIF iovec views, and the
     * driver size type must match size_t/Uint. */
    ERTS_CT_ASSERT(offsetof(ErlNifIOVec,flags) == sizeof(ErtsIOVecCommon));
    ERTS_CT_ASSERT(sizeof(ErlIOVec) == sizeof(ErtsIOVecCommon));
    ERTS_CT_ASSERT(sizeof(size_t) == sizeof(ErlDrvSizeT));
    ERTS_CT_ASSERT(sizeof(size_t) == sizeof(Uint));

    q->alct = alct;
    q->driver = driver;
    q->size = 0;

    /* Empty queue: head == tail == start, both backed by the small
     * inline arrays until the queue outgrows them. */
    q->v_start = q->v_small;
    q->v_end = q->v_start + ERTS_SMALL_IO_QUEUE;
    q->v_head = q->v_start;
    q->v_tail = q->v_start;

    q->b_start = q->b_small;
    q->b_end = q->b_start + ERTS_SMALL_IO_QUEUE;
    q->b_head = q->b_start;
    q->b_tail = q->b_start;
}
/* Ensure the accumulator binary in `state` has room for at least `extra`
 * more bytes beyond the current state->acc_size, allocating or growing
 * it as needed and charging reduction costs for work performed. */
static void iol2v_expand_acc(iol2v_state_t *state, UWord extra) {
    UWord required_bytes, acc_alloc_size;

    /* NOTE(review): this assert presumably intends to guard the
     * `acc_alloc_size * 2` doubling below against UWord overflow, but
     * `ERTS_UWORD_MAX > ACC_REALLOCATION_LIMIT / 2` is a far weaker
     * condition than `ERTS_UWORD_MAX / 2 >= ACC_REALLOCATION_LIMIT`
     * -- confirm the intended bound. */
    ERTS_CT_ASSERT(ERTS_UWORD_MAX > ACC_REALLOCATION_LIMIT / 2);
    ASSERT(extra >= 1);

    /* No accumulator yet means zero capacity. */
    acc_alloc_size = state->acc != NULL ? (state->acc)->orig_size : 0;
    required_bytes = state->acc_size + extra;

    if (state->acc == NULL) {
        /* First allocation: round up to the small-binary limit so tiny
         * appends don't trigger an immediate regrow. */
        UWord new_size = MAX(required_bytes, IOL2V_SMALL_BIN_LIMIT);
        state->acc = erts_bin_nrml_alloc(new_size);
    } else if (required_bytes > acc_alloc_size) {
        Binary *prev_acc;
        UWord new_size;

        if (acc_alloc_size >= ACC_REALLOCATION_LIMIT) {
            /* We skip reallocating once we hit a certain point; it often
             * results in extra copying and we're very likely to overallocate
             * on anything other than absurdly long byte/heapbin sequences. */
            iol2v_enqueue_result(state, iol2v_promote_acc(state));
            /* Retry with the freshly emptied accumulator; the recursion
             * terminates because promoting resets acc_size. */
            iol2v_expand_acc(state, extra);
            return;
        }

        /* Grow geometrically (at least doubling) to amortize copies. */
        new_size = MAX(required_bytes, acc_alloc_size * 2);
        prev_acc = state->acc;
        state->acc = erts_bin_realloc(prev_acc, new_size);

        if (prev_acc != state->acc) {
            /* Realloc moved the data; charge reductions for the copy of
             * the bytes currently held in the accumulator. */
            state->bytereds_spent += state->acc_size;
        }
    }

    /* Charge for the bytes about to be appended. */
    state->bytereds_spent += extra;
}
void erts_mtrace_install_wrapper_functions(void) { if (erts_mtrace_enabled) { int i; /* Install trace functions */ ERTS_CT_ASSERT(sizeof(erts_allctrs) == sizeof(real_allctrs)); sys_memcpy((void *) real_allctrs, (void *) erts_allctrs, sizeof(erts_allctrs)); for (i = ERTS_ALC_A_MIN; i <= ERTS_ALC_A_MAX; i++) { erts_allctrs[i].alloc = mtrace_alloc; erts_allctrs[i].realloc = mtrace_realloc; erts_allctrs[i].free = mtrace_free; erts_allctrs[i].extra = (void *) &real_allctrs[i]; } mtrace_wrapper.lock = mtrace_pre_lock; mtrace_wrapper.unlock = mtrace_pre_unlock; erts_allctr_wrapper_prelock_init(&mtrace_wrapper); } }
Uint erts_instr_init(int stat, int map_stat) { Uint extra_sz; int i; am_tot = NULL; am_n = NULL; am_c = NULL; am_a = NULL; erts_instr_memory_map = 0; erts_instr_stat = 0; atoms_initialized = 0; if (!stat && !map_stat) return 0; stats = erts_alloc(ERTS_ALC_T_INSTR_INFO, sizeof(struct stats_)); erts_mtx_init(&instr_mutex, "instr"); mem_anchor = NULL; /* Install instrumentation functions */ ERTS_CT_ASSERT(sizeof(erts_allctrs) == sizeof(real_allctrs)); sys_memcpy((void *)real_allctrs,(void *)erts_allctrs,sizeof(erts_allctrs)); sys_memzero((void *) &stats->tot, sizeof(Stat_t)); sys_memzero((void *) stats->a, sizeof(Stat_t)*(ERTS_ALC_A_MAX+1)); sys_memzero((void *) stats->c, sizeof(Stat_t)*(ERTS_ALC_C_MAX+1)); sys_memzero((void *) stats->n, sizeof(Stat_t)*(ERTS_ALC_N_MAX+1)); for (i = ERTS_ALC_A_MIN; i <= ERTS_ALC_A_MAX; i++) { if (erts_allctrs_info[i].enabled) stats->ap[i] = &stats->a[i]; else stats->ap[i] = &stats->a[ERTS_ALC_A_SYSTEM]; } if (map_stat) { erts_mtx_init(&instr_x_mutex, "instr_x"); erts_instr_memory_map = 1; erts_instr_stat = 1; for (i = ERTS_ALC_A_MIN; i <= ERTS_ALC_A_MAX; i++) { erts_allctrs[i].alloc = map_stat_alloc; erts_allctrs[i].realloc = map_stat_realloc; erts_allctrs[i].free = map_stat_free; erts_allctrs[i].extra = (void *) &real_allctrs[i]; } instr_wrapper.lock = map_stat_pre_lock; instr_wrapper.unlock = map_stat_pre_unlock; extra_sz = MAP_STAT_BLOCK_HEADER_SIZE; } else { erts_instr_stat = 1; for (i = ERTS_ALC_A_MIN; i <= ERTS_ALC_A_MAX; i++) { erts_allctrs[i].alloc = stat_alloc; erts_allctrs[i].realloc = stat_realloc; erts_allctrs[i].free = stat_free; erts_allctrs[i].extra = (void *) &real_allctrs[i]; } instr_wrapper.lock = stat_pre_lock; instr_wrapper.unlock = stat_pre_unlock; extra_sz = STAT_BLOCK_HEADER_SIZE; } erts_allctr_wrapper_prelock_init(&instr_wrapper); return extra_sz; }