static inline void
init_cpu_features (struct cpu_features *cpu_features)
{
  /* Default to aligned memory access in the optimized functions unless
     tunables are enabled, since in that case the user can explicitly
     disable the unaligned optimizations.  */
#if HAVE_TUNABLES
  int32_t cached_memfunc = TUNABLE_GET (glibc, tune, cached_memopt, int32_t,
					NULL);
  cpu_features->use_cached_memopt = (cached_memfunc > 0);
#else
  cpu_features->use_cached_memopt = false;
#endif
}
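/* For illustration: in a tunables-enabled build the value read above is
   normally supplied through the environment, e.g.

       GLIBC_TUNABLES=glibc.tune.cached_memopt=1 ./app

   (GLIBC_TUNABLES takes colon-separated namespace.name=value pairs).
   Below is a hypothetical sketch of a consumer of the flag; the function
   name use_cached_memopt_path is an assumption for illustration, not part
   of the code above.  */

static inline bool
use_cached_memopt_path (const struct cpu_features *cpu_features)
{
  /* Take the cached/unaligned fast path only when the user opted in.  */
  return cpu_features->use_cached_memopt;
}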
void
__pthread_tunables_init (void)
{
  TUNABLE_GET (mutex_spin_count, int32_t,
	       TUNABLE_CALLBACK (set_mutex_spin_count));
}
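/* When TUNABLE_GET is given a callback, the parsed value is handed to the
   named setter rather than returned.  A minimal sketch of such a callback,
   assuming the usual tunable_val_t convention; the __mutex_aconf structure
   is an assumption here, not taken from the excerpt above.  */

static void
TUNABLE_CALLBACK (set_mutex_spin_count) (tunable_val_t *valp)
{
  /* Copy the tunable's numeric value into the mutex configuration.  */
  __mutex_aconf.spin_count = (int32_t) valp->numval;
}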
static void
ptmalloc_init (void)
{
  if (__malloc_initialized >= 0)
    return;

  __malloc_initialized = 0;

#ifdef SHARED
  /* In case this libc copy is in a non-default namespace, never use
     brk.  Likewise if dlopened from a statically linked program.  */
  Dl_info di;
  struct link_map *l;

  if (_dl_open_hook != NULL
      || (_dl_addr (ptmalloc_init, &di, &l, NULL) != 0
	  && l->l_ns != LM_ID_BASE))
    __morecore = __failing_morecore;
#endif

  thread_arena = &main_arena;

  malloc_init_state (&main_arena);

#if HAVE_TUNABLES
  TUNABLE_GET (check, int32_t, TUNABLE_CALLBACK (set_mallopt_check));
  TUNABLE_GET (top_pad, size_t, TUNABLE_CALLBACK (set_top_pad));
  TUNABLE_GET (perturb, int32_t, TUNABLE_CALLBACK (set_perturb_byte));
  TUNABLE_GET (mmap_threshold, size_t, TUNABLE_CALLBACK (set_mmap_threshold));
  TUNABLE_GET (trim_threshold, size_t, TUNABLE_CALLBACK (set_trim_threshold));
  TUNABLE_GET (mmap_max, int32_t, TUNABLE_CALLBACK (set_mmaps_max));
  TUNABLE_GET (arena_max, size_t, TUNABLE_CALLBACK (set_arena_max));
  TUNABLE_GET (arena_test, size_t, TUNABLE_CALLBACK (set_arena_test));
# if USE_TCACHE
  TUNABLE_GET (tcache_max, size_t, TUNABLE_CALLBACK (set_tcache_max));
  TUNABLE_GET (tcache_count, size_t, TUNABLE_CALLBACK (set_tcache_count));
  TUNABLE_GET (tcache_unsorted_limit, size_t,
	       TUNABLE_CALLBACK (set_tcache_unsorted_limit));
# endif
#else
  const char *s = NULL;
  if (__glibc_likely (_environ != NULL))
    {
      char **runp = _environ;
      char *envline;

      while (__builtin_expect ((envline = next_env_entry (&runp)) != NULL, 0))
	{
	  size_t len = strcspn (envline, "=");

	  if (envline[len] != '=')
	    /* This is a "MALLOC_" variable at the end of the string
	       without a '=' character.  Ignore it since otherwise we
	       will access invalid memory below.  */
	    continue;

	  switch (len)
	    {
	    case 6:
	      if (memcmp (envline, "CHECK_", 6) == 0)
		s = &envline[7];
	      break;
	    case 8:
	      if (!__builtin_expect (__libc_enable_secure, 0))
		{
		  if (memcmp (envline, "TOP_PAD_", 8) == 0)
		    __libc_mallopt (M_TOP_PAD, atoi (&envline[9]));
		  else if (memcmp (envline, "PERTURB_", 8) == 0)
		    __libc_mallopt (M_PERTURB, atoi (&envline[9]));
		}
	      break;
	    case 9:
	      if (!__builtin_expect (__libc_enable_secure, 0))
		{
		  if (memcmp (envline, "MMAP_MAX_", 9) == 0)
		    __libc_mallopt (M_MMAP_MAX, atoi (&envline[10]));
		  else if (memcmp (envline, "ARENA_MAX", 9) == 0)
		    __libc_mallopt (M_ARENA_MAX, atoi (&envline[10]));
		}
	      break;
	    case 10:
	      if (!__builtin_expect (__libc_enable_secure, 0))
		{
		  if (memcmp (envline, "ARENA_TEST", 10) == 0)
		    __libc_mallopt (M_ARENA_TEST, atoi (&envline[11]));
		}
	      break;
	    case 15:
	      if (!__builtin_expect (__libc_enable_secure, 0))
		{
		  if (memcmp (envline, "TRIM_THRESHOLD_", 15) == 0)
		    __libc_mallopt (M_TRIM_THRESHOLD, atoi (&envline[16]));
		  else if (memcmp (envline, "MMAP_THRESHOLD_", 15) == 0)
		    __libc_mallopt (M_MMAP_THRESHOLD, atoi (&envline[16]));
		}
	      break;
	    default:
	      break;
	    }
	}
    }
  if (s && s[0] != '\0' && s[0] != '0')
    __malloc_check_init ();
#endif

#if HAVE_MALLOC_INIT_HOOK
  void (*hook) (void) = atomic_forced_read (__malloc_initialize_hook);
  if (hook != NULL)
    (*hook) ();
#endif
  __malloc_initialized = 1;
}
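/* Each TUNABLE_GET above routes the parsed value through a small setter.
   A minimal sketch of the `check' callback, assuming the tunable_val_t
   convention used by the tunables framework; it mirrors the MALLOC_CHECK_
   handling in the non-tunables branch above.  */

static void
TUNABLE_CALLBACK (set_mallopt_check) (tunable_val_t *valp)
{
  int32_t value = (int32_t) valp->numval;

  /* Any non-zero value switches on the malloc consistency checks, just
     as a non-'0' MALLOC_CHECK_ value does in the environment fallback.  */
  if (value != 0)
    __malloc_check_init ();
}

/* Usage note: with tunables the same switch is reached via
   GLIBC_TUNABLES=glibc.malloc.check=3; without tunables the scan above
   reacts to MALLOC_CHECK_=3, MALLOC_TOP_PAD_, MALLOC_MMAP_THRESHOLD_,
   and so on.  The `memcmp (envline, "CHECK_", 6)' comparisons imply that
   next_env_entry returns each entry with its "MALLOC_" prefix already
   stripped.  */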