/*
 * backtrace - capture the call stack for the sampled context into css_data.
 *
 * @orig_regs: register state at the sample point
 * @cpu:       CPU the sample was taken on
 * @pid/@tid:  process/thread being sampled
 * @css_data:  output call-stack record; depth and frames are filled in
 *
 * Kernel-mode samples are unwound through kernel space first; if the task
 * was inside a system call (last user-mode instruction was SWI) the unwind
 * continues into user space, appended after the kernel frames.
 *
 * Fix vs. original: the return value of user_backtrace() was stored in an
 * otherwise-unused local (u_depth); the dead store has been removed.
 */
void backtrace(struct pt_regs * const orig_regs, unsigned int cpu,
	       pid_t pid, pid_t tid, PXD32_CSS_Call_Stack_V2 *css_data)
{
	unsigned int k_depth = 0;
	struct pt_regs *regs = orig_regs;

	css_data->depth = 0;

	/* If it is a swapper process (tgid 0), we don't backtrace it,
	 * or it will result in bug CPA-310. Record only the interrupted
	 * PC as a single frame. Note: this path does not bump
	 * g_sample_count, matching the original behavior. */
	if (current->tgid == 0) {
		css_data->depth = 1;
		css_data->cs[0].address = orig_regs->ARM_pc;
		css_data->cs[0].pid = pid;
		return;
	}

	/* Kernel-space portion of the call stack. */
	if (!user_mode(regs)) {
		k_depth = kernel_backtrace(regs, cpu, pid, tid, css_data);

		/* If the last instruction in user mode is SWI then we are
		 * in a system call; only then trace back into user space. */
		regs = GET_USERREG();
		if (!is_swi_instruction(regs->ARM_pc - arm_inst_size())) {
			g_sample_count++;
			return;
		}
	}

	/* User-space portion, appended after any kernel frames. */
	user_backtrace(regs, cpu, pid, tid, k_depth, css_data);

	g_sample_count++;
}
void float_raise(signed char flags) { register unsigned int fpsr, cumulativeTraps; #ifdef CONFIG_DEBUG_USER printk(KERN_DEBUG "NWFPE: %s[%d] takes exception %08x at %p from %08lx\n", current->comm, current->pid, flags, __builtin_return_address(0), GET_USERREG()[15]); #endif /* Keep SoftFloat exception flags up to date. */ float_exception_flags |= flags; /* Read fpsr and initialize the cumulativeTraps. */ fpsr = readFPSR(); cumulativeTraps = 0; /* For each type of exception, the cumulative trap exception bit is only set if the corresponding trap enable bit is not set. */ if ((!(fpsr & BIT_IXE)) && (flags & BIT_IXC)) cumulativeTraps |= BIT_IXC; if ((!(fpsr & BIT_UFE)) && (flags & BIT_UFC)) cumulativeTraps |= BIT_UFC; if ((!(fpsr & BIT_OFE)) && (flags & BIT_OFC)) cumulativeTraps |= BIT_OFC; if ((!(fpsr & BIT_DZE)) && (flags & BIT_DZC)) cumulativeTraps |= BIT_DZC; if ((!(fpsr & BIT_IOE)) && (flags & BIT_IOC)) cumulativeTraps |= BIT_IOC; /* Set the cumulative exceptions flags. */ if (cumulativeTraps) writeFPSR(fpsr | cumulativeTraps); /* Raise an exception if necessary. */ if (fpsr & (flags << 16)) fp_send_sig(SIGFPE, current, 1); }