/* Create and install the mheap for one cpu; must run on that cpu.
 *
 * Each cpu's VM arena holds its heap at the bottom and its stack at the
 * top, so the heap gets (arena size - stack size) bytes.  Cpu 0 also
 * bootstraps the shared state: the per-cpu mains vector and the
 * thread-safe global heap (placed in the arena slot just past the last
 * cpu's).  Always returns 0.  */
static uword allocate_per_cpu_mheap (uword cpu)
{
  clib_smp_main_t * sm = &clib_smp_main;
  void * cpu_heap;
  uword n_vm_bytes, n_stack_bytes, heap_flags;

  /* This must execute on the cpu whose heap we are creating. */
  ASSERT (os_get_cpu_number () == cpu);

  n_vm_bytes = (uword) 1 << sm->log2_n_per_cpu_vm_bytes;
  n_stack_bytes = (uword) 1 << sm->log2_n_per_cpu_stack_bytes;
  heap_flags = MHEAP_FLAG_SMALL_OBJECT_CACHE;

  /* Heap fills the cpu's VM arena up to where the stack begins. */
  cpu_heap = mheap_alloc_with_flags (clib_smp_vm_base_for_cpu (sm, cpu),
				     n_vm_bytes - n_stack_bytes,
				     heap_flags);
  clib_mem_set_heap (cpu_heap);

  if (cpu == 0)
    {
      /* With a heap in place, cpu 0 can allocate the per-cpu mains vector. */
      vec_resize (sm->per_cpu_mains, sm->n_cpus);

      /* Shared global heap (thread safe), in the arena past the last cpu. */
      sm->global_heap =
	mheap_alloc_with_flags (clib_smp_vm_base_for_cpu (sm, cpu + sm->n_cpus),
				n_vm_bytes,
				heap_flags | MHEAP_FLAG_THREAD_SAFE);
    }

  sm->per_cpu_mains[cpu].heap = cpu_heap;

  return 0;
}
/* Free up all trace buffer memory. */ always_inline void clear_trace_buffer (void) { int i; vlib_trace_main_t * tm; foreach_vlib_main ( ({ void *mainheap; tm = &this_vlib_main->trace_main; mainheap = clib_mem_set_heap (this_vlib_main->heap_base); for (i = 0; i < vec_len (tm->trace_buffer_pool); i++) if (! pool_is_free_index (tm->trace_buffer_pool, i)) vec_free (tm->trace_buffer_pool[i]); pool_free (tm->trace_buffer_pool); clib_mem_set_heap (mainheap); }));