/* Exhaustively exercises the VG_IS_<N>_ALIGNED() macros over the address
   values 0x0..0xf, then VG_IS_WORD_ALIGNED() (whose word size follows
   sizeof(void*)) and VG_IS_PAGE_ALIGNED() (whose granule is VKI_PAGE_SIZE).
   Each CHECK() records/reports a single expected-true condition. */
void test_VG_IS_XYZ_ALIGNED(void)
{
   /* 2-byte alignment: true exactly for even addresses. */
   CHECK(   VG_IS_2_ALIGNED(0x0) );
   CHECK( ! VG_IS_2_ALIGNED(0x1) );
   CHECK(   VG_IS_2_ALIGNED(0x2) );
   CHECK( ! VG_IS_2_ALIGNED(0x3) );
   CHECK(   VG_IS_2_ALIGNED(0x4) );
   CHECK( ! VG_IS_2_ALIGNED(0x5) );
   CHECK(   VG_IS_2_ALIGNED(0x6) );
   CHECK( ! VG_IS_2_ALIGNED(0x7) );
   CHECK(   VG_IS_2_ALIGNED(0x8) );
   CHECK( ! VG_IS_2_ALIGNED(0x9) );
   CHECK(   VG_IS_2_ALIGNED(0xa) );
   CHECK( ! VG_IS_2_ALIGNED(0xb) );
   CHECK(   VG_IS_2_ALIGNED(0xc) );
   CHECK( ! VG_IS_2_ALIGNED(0xd) );
   CHECK(   VG_IS_2_ALIGNED(0xe) );
   CHECK( ! VG_IS_2_ALIGNED(0xf) );

   /* 4-byte alignment: true exactly for multiples of 4. */
   CHECK(   VG_IS_4_ALIGNED(0x0) );
   CHECK( ! VG_IS_4_ALIGNED(0x1) );
   CHECK( ! VG_IS_4_ALIGNED(0x2) );
   CHECK( ! VG_IS_4_ALIGNED(0x3) );
   CHECK(   VG_IS_4_ALIGNED(0x4) );
   CHECK( ! VG_IS_4_ALIGNED(0x5) );
   CHECK( ! VG_IS_4_ALIGNED(0x6) );
   CHECK( ! VG_IS_4_ALIGNED(0x7) );
   CHECK(   VG_IS_4_ALIGNED(0x8) );
   CHECK( ! VG_IS_4_ALIGNED(0x9) );
   CHECK( ! VG_IS_4_ALIGNED(0xa) );
   CHECK( ! VG_IS_4_ALIGNED(0xb) );
   CHECK(   VG_IS_4_ALIGNED(0xc) );
   CHECK( ! VG_IS_4_ALIGNED(0xd) );
   CHECK( ! VG_IS_4_ALIGNED(0xe) );
   CHECK( ! VG_IS_4_ALIGNED(0xf) );

   /* 8-byte alignment: true exactly for multiples of 8 (0x0 and 0x8). */
   CHECK(   VG_IS_8_ALIGNED(0x0) );
   CHECK( ! VG_IS_8_ALIGNED(0x1) );
   CHECK( ! VG_IS_8_ALIGNED(0x2) );
   CHECK( ! VG_IS_8_ALIGNED(0x3) );
   CHECK( ! VG_IS_8_ALIGNED(0x4) );
   CHECK( ! VG_IS_8_ALIGNED(0x5) );
   CHECK( ! VG_IS_8_ALIGNED(0x6) );
   CHECK( ! VG_IS_8_ALIGNED(0x7) );
   CHECK(   VG_IS_8_ALIGNED(0x8) );
   CHECK( ! VG_IS_8_ALIGNED(0x9) );
   CHECK( ! VG_IS_8_ALIGNED(0xa) );
   CHECK( ! VG_IS_8_ALIGNED(0xb) );
   CHECK( ! VG_IS_8_ALIGNED(0xc) );
   CHECK( ! VG_IS_8_ALIGNED(0xd) );
   CHECK( ! VG_IS_8_ALIGNED(0xe) );
   CHECK( ! VG_IS_8_ALIGNED(0xf) );

   /* 16-byte alignment: within 0x0..0xf only 0x0 qualifies. */
   CHECK(   VG_IS_16_ALIGNED(0x0) );
   CHECK( ! VG_IS_16_ALIGNED(0x1) );
   CHECK( ! VG_IS_16_ALIGNED(0x2) );
   CHECK( ! VG_IS_16_ALIGNED(0x3) );
   CHECK( ! VG_IS_16_ALIGNED(0x4) );
   CHECK( ! VG_IS_16_ALIGNED(0x5) );
   CHECK( ! VG_IS_16_ALIGNED(0x6) );
   CHECK( ! VG_IS_16_ALIGNED(0x7) );
   CHECK( ! VG_IS_16_ALIGNED(0x8) );
   CHECK( ! VG_IS_16_ALIGNED(0x9) );
   CHECK( ! VG_IS_16_ALIGNED(0xa) );
   CHECK( ! VG_IS_16_ALIGNED(0xb) );
   CHECK( ! VG_IS_16_ALIGNED(0xc) );
   CHECK( ! VG_IS_16_ALIGNED(0xd) );
   CHECK( ! VG_IS_16_ALIGNED(0xe) );
   CHECK( ! VG_IS_16_ALIGNED(0xf) );

   /* Word alignment: word size is sizeof(void*), so 0x4 and 0xc are only
      word-aligned on 32-bit targets — they are tested separately below. */
   CHECK(   VG_IS_WORD_ALIGNED(0x0) );
   CHECK( ! VG_IS_WORD_ALIGNED(0x1) );
   CHECK( ! VG_IS_WORD_ALIGNED(0x2) );
   CHECK( ! VG_IS_WORD_ALIGNED(0x3) );
   // 0x4 case below
   CHECK( ! VG_IS_WORD_ALIGNED(0x5) );
   CHECK( ! VG_IS_WORD_ALIGNED(0x6) );
   CHECK( ! VG_IS_WORD_ALIGNED(0x7) );
   CHECK(   VG_IS_WORD_ALIGNED(0x8) );
   CHECK( ! VG_IS_WORD_ALIGNED(0x9) );
   CHECK( ! VG_IS_WORD_ALIGNED(0xa) );
   CHECK( ! VG_IS_WORD_ALIGNED(0xb) );
   // 0xc case below
   CHECK( ! VG_IS_WORD_ALIGNED(0xd) );
   CHECK( ! VG_IS_WORD_ALIGNED(0xe) );
   CHECK( ! VG_IS_WORD_ALIGNED(0xf) );
   /* The pointer-size-dependent cases; any other pointer size is a
      configuration we do not support. */
   if (4 == sizeof(void*)) {
      CHECK(   VG_IS_WORD_ALIGNED(0x4) );
      CHECK(   VG_IS_WORD_ALIGNED(0xc) );
   } else if (8 == sizeof(void*)) {
      CHECK( ! VG_IS_WORD_ALIGNED(0x4) );
      CHECK( ! VG_IS_WORD_ALIGNED(0xc) );
   } else {
      assert(0);
   }

   /* Page alignment: spot checks around 0 and around VKI_PAGE_SIZE. */
   CHECK(   VG_IS_PAGE_ALIGNED(0x0) );
   CHECK( ! VG_IS_PAGE_ALIGNED(0x1) );
   CHECK( ! VG_IS_PAGE_ALIGNED(0x2) );
   CHECK( ! VG_IS_PAGE_ALIGNED(0x3) );
   CHECK( ! VG_IS_PAGE_ALIGNED(0x4) );
   CHECK( ! VG_IS_PAGE_ALIGNED(VKI_PAGE_SIZE-1) );
   CHECK(   VG_IS_PAGE_ALIGNED(VKI_PAGE_SIZE  ) );
   CHECK( ! VG_IS_PAGE_ALIGNED(VKI_PAGE_SIZE+1) );
}
/* EXPORTED */
/* Build a signal-delivery frame on the guest stack of thread TID and point
   the thread's guest registers at HANDLER for signal SIGINFO->si_signo
   (mips32-linux variant).  Chooses an rt_sigframe or plain sigframe layout
   depending on VKI_SA_SIGINFO in FLAGS.
   NOTE(review): this definition continues beyond the visible chunk; the
   non-SA_SIGINFO branch is not shown here. */
void VG_(sigframe_create)( ThreadId tid,
                           Addr sp_top_of_frame,
                           const vki_siginfo_t *siginfo,
                           const struct vki_ucontext *siguc,
                           void *handler,
                           UInt flags,
                           const vki_sigset_t *mask,
                           void *restorer )
{
   Addr sp;
   ThreadState* tst = VG_(get_ThreadState)(tid);
   Int sigNo = siginfo->si_signo;
   struct vg_sig_private *priv;

   /* Round the frame top down to a 16-byte boundary (mask 0xf).  The
      original comment said "8-byte aligned", but the mask enforces 16;
      either way the 8-byte assertion below is satisfied. */
   sp_top_of_frame &= ~0xf;
   if (flags & VKI_SA_SIGINFO) {
      sp = sp_top_of_frame - sizeof(struct rt_sigframe);
   } else {
      sp = sp_top_of_frame - sizeof(struct sigframe);
   }

   /* NOTE(review): tst was already fetched in its initializer above; this
      re-fetch is redundant but harmless. */
   tst = VG_(get_ThreadState)(tid);

   /* Make sure the frame area is addressable; bail out silently if the
      stack cannot be extended. */
   if (!extend(tst, sp, sp_top_of_frame - sp))
      return;

   vg_assert(VG_IS_8_ALIGNED(sp));

   if (flags & VKI_SA_SIGINFO) {
      struct rt_sigframe *frame = (struct rt_sigframe *) sp;
      struct vki_ucontext *ucp = &frame->rs_uc;

      if (VG_(clo_trace_signals))
         VG_(printf)("rt_sigframe\n");

      /* Create siginfo: copy the caller's siginfo into the frame, telling
         the tool about the write. */
      VG_TRACK( pre_mem_write, Vg_CoreSignal, tid, "signal frame siginfo",
                (Addr)&frame->rs_info, sizeof(frame->rs_info) );
      VG_(memcpy)(&frame->rs_info, siginfo, sizeof(*siginfo));
      VG_TRACK( post_mem_write, Vg_CoreSignal, tid,
                (Addr)&frame->rs_info, sizeof(frame->rs_info) );

      /* Create the ucontext (everything up to, but not including,
         uc_mcontext, which setup_sigcontext2 fills in). */
      VG_TRACK( pre_mem_write, Vg_CoreSignal, tid, "signal frame ucontext",
                (Addr)ucp, offsetof(struct vki_ucontext, uc_mcontext) );
      ucp->uc_flags = 0;
      ucp->uc_link = 0;
      ucp->uc_stack = tst->altstack;
      VG_TRACK( post_mem_write, Vg_CoreSignal, tid,
                (Addr)ucp, offsetof(struct vki_ucontext, uc_mcontext) );

      struct vki_sigcontext *scp = &(frame->rs_uc.uc_mcontext);
      setup_sigcontext2(tst, &(scp), siginfo);

      ucp->uc_sigmask = tst->sig_mask;

      priv = &frame->priv;

      /*
       * Arguments to the signal handler (mips o32 convention):
       *
       *   a0 (r4) = signal number
       *   a1 (r5) = pointer to siginfo in the frame
       *   a2 (r6) = pointer to ucontext in the frame
       *
       *   $25 (t9) and c0_epc point to the signal handler, $29 (sp)
       *   points to the struct rt_sigframe.
       */
      tst->arch.vex.guest_r4 = siginfo->si_signo;
      tst->arch.vex.guest_r5 = (Addr) &frame->rs_info;
      tst->arch.vex.guest_r6 = (Addr) &frame->rs_uc;
      tst->arch.vex.guest_r29 = (Addr) frame;
      tst->arch.vex.guest_r25 = (Addr) handler;

      /* Return address ($31): caller-supplied restorer if given, else the
         in-tree rt_sigreturn trampoline. */
      if (flags & VKI_SA_RESTORER) {
         tst->arch.vex.guest_r31 = (Addr) restorer;
      } else {
         tst->arch.vex.guest_r31 =
            (Addr)&VG_(mips32_linux_SUBST_FOR_rt_sigreturn);
      }
   } else {
      /* Plain (non-SA_SIGINFO) sigframe path. */
      if (VG_(clo_trace_signals))
/* EXPORTED */ void VG_(sigframe_create)( ThreadId tid, Addr sp_top_of_frame, const vki_siginfo_t *siginfo, const struct vki_ucontext *siguc, void *handler, UInt flags, const vki_sigset_t *mask, void *restorer ) { Addr sp; ThreadState* tst; Addr faultaddr; Int sigNo = siginfo->si_signo; struct vg_sig_private *priv; /* Stack must be 8-byte aligned */ sp_top_of_frame &= ~0x7ULL; sp = sp_top_of_frame - sizeof(struct rt_sigframe); tst = VG_(get_ThreadState)(tid); if (! ML_(sf_maybe_extend_stack)(tst, sp, sizeof(struct rt_sigframe), flags)) return; vg_assert(VG_IS_8_ALIGNED(sp)); /* SIGILL defines addr to be the faulting address */ faultaddr = (Addr)siginfo->_sifields._sigfault._addr; if (sigNo == VKI_SIGILL && siginfo->si_code > 0) faultaddr = tst->arch.vex.guest_pc; struct rt_sigframe *frame = (struct rt_sigframe *) sp; struct vki_ucontext *ucp = &frame->rs_uc; if (VG_(clo_trace_signals)) VG_(printf)("rt_sigframe\n"); /* Create siginfo. */ VG_TRACK( pre_mem_write, Vg_CoreSignal, tid, "signal frame siginfo", (Addr)&frame->rs_info, sizeof(frame->rs_info) ); VG_(memcpy)(&frame->rs_info, siginfo, sizeof(*siginfo)); VG_TRACK( post_mem_write, Vg_CoreSignal, tid, (Addr)&frame->rs_info, sizeof(frame->rs_info) ); /* Create the ucontext. */ VG_TRACK( pre_mem_write, Vg_CoreSignal, tid, "signal frame ucontext", (Addr)ucp, offsetof(struct vki_ucontext, uc_mcontext) ); ucp->uc_flags = 0; ucp->uc_link = 0; ucp->uc_stack = tst->altstack; VG_TRACK( post_mem_write, Vg_CoreSignal, tid, (Addr)ucp, offsetof(struct vki_ucontext, uc_mcontext) ); struct vki_sigcontext *scp = &(frame->rs_uc.uc_mcontext); setup_sigcontext2(tst, &(scp), siginfo); ucp->uc_sigmask = tst->sig_mask; priv = &frame->priv; /* * Arguments to signal handler: * * r0 = signal number * r1 = 0 (should be cause) * r2 = pointer to ucontext * * r54 points to the struct rt_sigframe. 
*/ tst->arch.vex.guest_r0 = siginfo->si_signo; tst->arch.vex.guest_r1 = (Addr) &frame->rs_info; tst->arch.vex.guest_r2 = (Addr) &frame->rs_uc; tst->arch.vex.guest_r54 = (Addr) frame; if (flags & VKI_SA_RESTORER) { tst->arch.vex.guest_r55 = (Addr) restorer; } else { tst->arch.vex.guest_r55 = (Addr)&VG_(tilegx_linux_SUBST_FOR_rt_sigreturn); } priv->magicPI = 0x31415927; priv->sigNo_private = sigNo; priv->vex_shadow1 = tst->arch.vex_shadow1; priv->vex_shadow2 = tst->arch.vex_shadow2; /* Set the thread so it will next run the handler. */ /* tst->m_sp = sp; also notify the tool we've updated SP */ VG_TRACK( post_reg_write, Vg_CoreSignal, tid, VG_O_STACK_PTR, sizeof(Addr)); if (VG_(clo_trace_signals)) VG_(printf)("handler = %p\n", handler); tst->arch.vex.guest_pc = (Addr) handler; /* This thread needs to be marked runnable, but we leave that the caller to do. */ if (0) VG_(printf)("pushed signal frame; sp now = %lx, " "next %pc = %lx, status=%d\n", (Addr)frame, tst->arch.vex.guest_pc, tst->status); }
/* Build a signal-delivery frame on the guest stack of thread TID and point
   the thread's guest registers at HANDLER for signal SIGINFO->si_signo
   (mips32-linux variant).  Chooses an rt_sigframe or plain sigframe layout
   depending on VKI_SA_SIGINFO in FLAGS.
   NOTE(review): this definition continues beyond the visible chunk; the
   non-SA_SIGINFO branch is not shown here. */
void VG_(sigframe_create)( ThreadId tid,
                           Addr sp_top_of_frame,
                           const vki_siginfo_t *siginfo,
                           const struct vki_ucontext *siguc,
                           void *handler,
                           UInt flags,
                           const vki_sigset_t *mask,
                           void *restorer )
{
   Addr sp;
   ThreadState* tst = VG_(get_ThreadState)(tid);
   Addr faultaddr;
   Int sigNo = siginfo->si_signo;
   struct vg_sig_private *priv;

   /* Round the frame top down to a 16-byte boundary (mask 0xf); this also
      satisfies the 8-byte alignment asserted below. */
   sp_top_of_frame &= ~0xf;
   if (flags & VKI_SA_SIGINFO) {
      sp = sp_top_of_frame - sizeof(struct rt_sigframe);
   } else {
      sp = sp_top_of_frame - sizeof(struct sigframe);
   }

   /* NOTE(review): tst was already fetched in its initializer above; this
      re-fetch is redundant but harmless. */
   tst = VG_(get_ThreadState)(tid);

   /* Make sure the frame area is addressable; bail out silently if the
      stack cannot be extended. */
   if (!extend(tst, sp, sp_top_of_frame - sp))
      return;

   vg_assert(VG_IS_8_ALIGNED(sp));

   /* SIGILL defines addr to be the faulting address. */
   /* NOTE(review): faultaddr is computed but never consumed in the
      visible code — confirm intended use. */
   faultaddr = (Addr)siginfo->_sifields._sigfault._addr;
   if (sigNo == VKI_SIGILL && siginfo->si_code > 0)
      faultaddr = tst->arch.vex.guest_PC;

   if (flags & VKI_SA_SIGINFO) {
      struct rt_sigframe *frame = (struct rt_sigframe *) sp;
      struct vki_ucontext *ucp = &frame->rs_uc;

      if (VG_(clo_trace_signals))
         VG_(printf)("rt_sigframe\n");

      /* Create siginfo: copy the caller's siginfo into the frame, telling
         the tool about the write. */
      VG_TRACK( pre_mem_write, Vg_CoreSignal, tid, "signal frame siginfo",
                (Addr)&frame->rs_info, sizeof(frame->rs_info) );
      VG_(memcpy)(&frame->rs_info, siginfo, sizeof(*siginfo));
      VG_TRACK( post_mem_write, Vg_CoreSignal, tid,
                (Addr)&frame->rs_info, sizeof(frame->rs_info) );

      /* Create the ucontext (everything up to, but not including,
         uc_mcontext, which setup_sigcontext2 fills in). */
      VG_TRACK( pre_mem_write, Vg_CoreSignal, tid, "signal frame ucontext",
                (Addr)ucp, offsetof(struct vki_ucontext, uc_mcontext) );
      ucp->uc_flags = 0;
      ucp->uc_link = 0;
      ucp->uc_stack = tst->altstack;
      VG_TRACK( post_mem_write, Vg_CoreSignal, tid,
                (Addr)ucp, offsetof(struct vki_ucontext, uc_mcontext) );

      struct vki_sigcontext *scp = &(frame->rs_uc.uc_mcontext);
      setup_sigcontext2(tst, &(scp), siginfo);

      ucp->uc_sigmask = tst->sig_mask;

      priv = &frame->priv;

      /* Handler arguments (mips o32 convention): a0 (r4) = signal number,
         a1 (r5) = pointer to siginfo, a2 (r6) = pointer to ucontext;
         $25 (t9) gets the handler address and $29 (sp) the frame. */
      tst->arch.vex.guest_r4 = siginfo->si_signo;
      tst->arch.vex.guest_r5 = (Addr) &frame->rs_info;
      tst->arch.vex.guest_r6 = (Addr) &frame->rs_uc;
      tst->arch.vex.guest_r29 = (Addr) frame;
      tst->arch.vex.guest_r25 = (Addr) handler;

      /* Return address ($31): caller-supplied restorer if given, else the
         in-tree rt_sigreturn trampoline. */
      if (flags & VKI_SA_RESTORER) {
         tst->arch.vex.guest_r31 = (Addr) restorer;
      } else {
         tst->arch.vex.guest_r31 =
            (Addr)&VG_(mips32_linux_SUBST_FOR_rt_sigreturn);
      }
   } else {
      /* Plain (non-SA_SIGINFO) sigframe path. */
      if (VG_(clo_trace_signals))