/*
 * Map a generic perf event onto a PMUv3 hardware event number.
 *
 * First tries the common architected event tables; if the resulting
 * common event is not supported by this PMU (per its pmceid_bitmap),
 * falls back to the CPU-specific extra_event_map/extra_cache_map
 * tables supplied by the caller.
 *
 * Returns the hardware event number, or a negative errno from
 * armpmu_map_event() on failure.
 */
static int __armv8_pmuv3_map_event(struct perf_event *event,
				   const unsigned (*extra_event_map)
						  [PERF_COUNT_HW_MAX],
				   const unsigned (*extra_cache_map)
						  [PERF_COUNT_HW_CACHE_MAX]
						  [PERF_COUNT_HW_CACHE_OP_MAX]
						  [PERF_COUNT_HW_CACHE_RESULT_MAX])
{
	int hw_event_id;
	struct arm_pmu *armpmu = to_arm_pmu(event->pmu);

	hw_event_id = armpmu_map_event(event, &armv8_pmuv3_perf_map,
				       &armv8_pmuv3_perf_cache_map,
				       ARMV8_PMU_EVTYPE_EVENT);

	/*
	 * Flag the event before the supported-event check so the 64-bit
	 * request survives even when we fall through to the extra maps.
	 * (armv8pmu_event_is_64bit() presumably checks a user-requested
	 * config bit — confirm against its definition.)
	 */
	if (armv8pmu_event_is_64bit(event))
		event->hw.flags |= ARMPMU_EVT_64BIT;

	/* Only expose micro/arch events supported by this PMU */
	if ((hw_event_id > 0) && (hw_event_id < ARMV8_PMUV3_MAX_COMMON_EVENTS)
	    && test_bit(hw_event_id, armpmu->pmceid_bitmap)) {
		return hw_event_id;
	}

	/* Common event unsupported or mapping failed: try the CPU-specific maps */
	return armpmu_map_event(event, extra_event_map, extra_cache_map,
				ARMV8_PMU_EVTYPE_EVENT);
}
static int armv8_pmuv3_map_event(struct perf_event *event) { int hw_event_id; struct arm_pmu *armpmu = to_arm_pmu(event->pmu); hw_event_id = armpmu_map_event(event, &armv8_pmuv3_perf_map, &armv8_pmuv3_perf_cache_map, ARMV8_PMU_EVTYPE_EVENT); if (hw_event_id < 0) return hw_event_id; /* disable micro/arch events not supported by this PMU */ if ((hw_event_id < ARMV8_PMUV3_MAX_COMMON_EVENTS) && !test_bit(hw_event_id, armpmu->pmceid_bitmap)) { return -EOPNOTSUPP; } return hw_event_id; }
/*
 * Map a generic perf event using the Vulcan-specific event and cache
 * tables. Thin wrapper around armpmu_map_event(); returns its result
 * (event number or negative errno) unchanged.
 */
static int armv8_vulcan_map_event(struct perf_event *event)
{
	int id;

	id = armpmu_map_event(event, &armv8_vulcan_perf_map,
			      &armv8_vulcan_perf_cache_map,
			      ARMV8_PMU_EVTYPE_EVENT);

	return id;
}
/*
 * Map a generic perf event using the Krait (MSM8960) tables.
 *
 * NOTE(review): the 0xfffff mask is wider than the architected
 * EVTYPE event field — Krait appears to encode extra selector bits
 * (region/group) in the event number, so the full 20-bit value is
 * preserved. Confirm against the Krait event encoding before
 * narrowing it.
 */
static int krait_8960_map_event(struct perf_event *event)
{
	return armpmu_map_event(event, &armv7_krait_perf_map,
				&armv7_krait_perf_cache_map, 0xfffff);
}