/*
 * Take a sample of this task's counters without stopping them.
 * Does nothing unless the counters are currently running.
 */
static void vperfctr_sample(struct vperfctr *perfctr)
{
	if (!IS_RUNNING(perfctr))
		return;
	perfctr_cpu_sample(&perfctr->cpu_state);
	/* re-arm the periodic sampling timer after each sample */
	vperfctr_reset_sampling_timer(perfctr);
}
/* PRE: perfctr == current->thread.perfctr && IS_RUNNING(perfctr)
 * Restart the counters.
 *
 * Call order matters: the debug hook runs before the hardware state is
 * restarted, and the CPU id is recorded only after the counters are live.
 */
static inline void vperfctr_resume(struct vperfctr *perfctr)
{
	debug_resume(perfctr);	/* debug-build hook before restarting; presumably validates state — confirm */
	perfctr_cpu_resume(&perfctr->cpu_state);
	vperfctr_reset_sampling_timer(perfctr);	/* re-arm periodic sampling */
	/* NOTE(review): presumably records the CPU we resumed on, to be
	 * cross-checked later by debug_check_smp_id() in the sample path —
	 * confirm against the debug hooks' definitions. */
	debug_set_smp_id(perfctr);
}
/*
 * Take a sample of this task's counters without stopping them.
 * A no-op when the counters are not running.
 */
static void vperfctr_sample(struct vperfctr *perfctr)
{
	if (!IS_RUNNING(perfctr))
		return;
	/* debug-build check that we are still on the expected CPU */
	debug_check_smp_id(perfctr);
	perfctr_cpu_sample(&perfctr->cpu_state);
	/* re-arm the periodic sampling timer */
	vperfctr_reset_sampling_timer(perfctr);
}
/* Sample the counters but do not suspend them. */ static inline void vperfctr_sample(struct vperfctr *perfctr) { if (IS_RUNNING(perfctr)) { // logical place to see if the counters are ours else return perfctr_cpu_sample(&perfctr->cpu_state); vperfctr_reset_sampling_timer(perfctr); } }
/* PRE: perfctr == current->thread.perfctr && IS_RUNNING(perfctr)
 * Restart the counters.
 *
 * Resumes the saved per-CPU counter state, then re-arms the periodic
 * sampling timer. Statement order is significant: the timer must only
 * be reset after the hardware counters are running again.
 */
static inline void vperfctr_resume(struct vperfctr *perfctr)
{
	perfctr_cpu_resume(&perfctr->cpu_state);
	vperfctr_reset_sampling_timer(perfctr);	/* re-arm periodic sampling */
}