/* Allocate a Valgrind thread stack laid out as
      [ lower guard | active area | upper guard ]
   where both guard regions are VG_STACK_GUARD_SZB bytes and are made
   inaccessible (PROT_NONE) so that stack overflow/underflow faults
   immediately instead of corrupting adjacent mappings.  On success
   returns the stack and writes the initial stack pointer — just below
   the top of the active area, 32-byte aligned — to *initial_sp.
   Returns NULL on failure (in which case nothing needs freeing).
   NOTE(review): on the VGPV_ppc64_linux_bgq build the guard pages are
   not protected at all — presumably mprotect is unavailable or
   undesired on that platform; confirm against the port's notes. */
VgStack* VG_(am_alloc_VgStack)( /*OUT*/Addr* initial_sp )
{
   Int      szB;
   SysRes   sres;
   VgStack* stack;
   UInt*    p;
   Int      i;

   /* Allocate the stack: lower guard + active area + upper guard. */
   szB = VG_STACK_GUARD_SZB + VG_STACK_ACTIVE_SZB + VG_STACK_GUARD_SZB;

#if !defined(VGPV_ppc64_linux_bgq)
   sres = VG_(am_mmap_anon_float_valgrind)( szB );
   if (sr_isError(sres))
      return NULL;
   stack = (VgStack*)(AddrH)sr_Res(sres);

   aspacem_assert(VG_IS_PAGE_ALIGNED(szB));
   aspacem_assert(VG_IS_PAGE_ALIGNED(stack));

   /* Protect the guard areas.  Each protection is done in two steps:
      the raw mprotect syscall, then notifying the address space
      manager so its segment records stay in sync.
      Lower guard first.  (&stack[0] and &stack->bytes[0] are the same
      address — bytes[] is the first member.) */
   sres = local_do_mprotect_NO_NOTIFY( 
             (Addr) &stack[0], 
             VG_STACK_GUARD_SZB, VKI_PROT_NONE 
          );
   if (sr_isError(sres)) goto protect_failed;
   VG_(am_notify_mprotect)( 
      (Addr) &stack->bytes[0], 
      VG_STACK_GUARD_SZB, VKI_PROT_NONE 
   );

   /* Upper guard: the VG_STACK_GUARD_SZB bytes above the active area. */
   sres = local_do_mprotect_NO_NOTIFY( 
             (Addr) &stack->bytes[VG_STACK_GUARD_SZB + VG_STACK_ACTIVE_SZB], 
             VG_STACK_GUARD_SZB, VKI_PROT_NONE 
          );
   if (sr_isError(sres)) goto protect_failed;
   VG_(am_notify_mprotect)( 
      (Addr) &stack->bytes[VG_STACK_GUARD_SZB + VG_STACK_ACTIVE_SZB],
      VG_STACK_GUARD_SZB, VKI_PROT_NONE 
   );
#else
   /* BGQ build: map the stack but skip guard-page protection
      entirely (no page-alignment asserts either). */
   {
     sres = VG_(am_mmap_anon_float_valgrind)( szB );
     if (sr_isError(sres))
        return NULL;
     stack = (VgStack*)sr_Res(sres);
   }
#endif

   /* Looks good.  Fill the active area with junk so we can later
      tell how much got used (by scanning for untouched 0xDEADBEEF
      words). */
   p = (UInt*)&stack->bytes[VG_STACK_GUARD_SZB];
   for (i = 0; i < VG_STACK_ACTIVE_SZB/sizeof(UInt); i++)
      p[i] = 0xDEADBEEF;

   /* Initial SP: top of the active area, stepped down by 8 and then
      rounded down to a 32-byte boundary. */
   *initial_sp = (Addr)&stack->bytes[VG_STACK_GUARD_SZB + VG_STACK_ACTIVE_SZB];
   *initial_sp -= 8;
   *initial_sp &= ~((Addr)0x1F); /* 32-align it */

   VG_(debugLog)( 1,"aspacem","allocated thread stack at 0x%llx size %d\n",
                  (ULong)(Addr)stack, szB);
   ML_(am_do_sanity_check)();
   return stack;

  protect_failed:
   /* The stack was allocated, but we can't protect it.  Unmap it and
      return NULL (failure). */
   (void)ML_(am_do_munmap_NO_NOTIFY)( (Addr)stack, szB );
   ML_(am_do_sanity_check)();
   return NULL;
}
/* Allocate a Valgrind thread stack laid out as
      [ lower guard | active area | upper guard ]
   where both guard regions are VG_STACK_GUARD_SZB bytes and are made
   inaccessible (PROT_NONE) so that stack overflow/underflow faults
   immediately instead of corrupting adjacent mappings.  On success
   returns the stack and writes the initial stack pointer — just below
   the top of the active area, 32-byte aligned — to *initial_sp.
   Returns NULL on failure (in which case nothing needs freeing). */
VgStack* VG_(am_alloc_VgStack)( /*OUT*/Addr* initial_sp )
{
   Int      szB;
   SysRes   sres;
   VgStack* stack;
   UInt*    p;
   Int      i;

   /* Total size: lower guard + active area + upper guard. */
   szB = VG_STACK_GUARD_SZB + VG_STACK_ACTIVE_SZB + VG_STACK_GUARD_SZB;

   sres = VG_(am_mmap_anon_float_valgrind)( szB );
   if (sr_isError(sres))
      return NULL;
   stack = (VgStack*)(AddrH)sr_Res(sres);

   aspacem_assert(VG_IS_PAGE_ALIGNED(szB));
   aspacem_assert(VG_IS_PAGE_ALIGNED(stack));

   /* Protect the lower guard area, then notify the address space
      manager so its segment records stay in sync with the kernel.
      (Use &stack->bytes[0] consistently in both calls — the original
      mixed &stack[0] and &stack->bytes[0], which are the same address
      but read as if they might differ.) */
   sres = local_do_mprotect_NO_NOTIFY(
             (Addr) &stack->bytes[0],
             VG_STACK_GUARD_SZB, VKI_PROT_NONE
          );
   if (sr_isError(sres)) goto protect_failed;
   VG_(am_notify_mprotect)(
      (Addr) &stack->bytes[0],
      VG_STACK_GUARD_SZB, VKI_PROT_NONE
   );

   /* Likewise for the upper guard area, just above the active area. */
   sres = local_do_mprotect_NO_NOTIFY(
             (Addr) &stack->bytes[VG_STACK_GUARD_SZB + VG_STACK_ACTIVE_SZB],
             VG_STACK_GUARD_SZB, VKI_PROT_NONE
          );
   if (sr_isError(sres)) goto protect_failed;
   VG_(am_notify_mprotect)(
      (Addr) &stack->bytes[VG_STACK_GUARD_SZB + VG_STACK_ACTIVE_SZB],
      VG_STACK_GUARD_SZB, VKI_PROT_NONE
   );

   /* Fill the active area with junk so we can later tell how much got
      used (by scanning for untouched 0xDEADBEEF words).  The cast on
      the loop bound avoids a signed/unsigned comparison between 'i'
      (Int) and the size_t result of sizeof. */
   p = (UInt*)&stack->bytes[VG_STACK_GUARD_SZB];
   for (i = 0; i < (Int)(VG_STACK_ACTIVE_SZB/sizeof(UInt)); i++)
      p[i] = 0xDEADBEEF;

   /* Initial SP: top of the active area, stepped down by 8 and then
      rounded down to a 32-byte boundary. */
   *initial_sp = (Addr)&stack->bytes[VG_STACK_GUARD_SZB + VG_STACK_ACTIVE_SZB];
   *initial_sp -= 8;
   *initial_sp &= ~((Addr)0x1F); /* 32-align it */

   VG_(debugLog)( 1,"aspacem","allocated thread stack at 0x%llx size %d\n",
                  (ULong)(Addr)stack, szB);
   ML_(am_do_sanity_check)();
   return stack;

  protect_failed:
   /* The stack was allocated, but a guard page could not be
      protected.  Unmap it and report failure. */
   (void)ML_(am_do_munmap_NO_NOTIFY)( (Addr)stack, szB );
   ML_(am_do_sanity_check)();
   return NULL;
}