/linux/arch/sh/kernel/
perf_event.c
    28  struct cpu_hw_events {  [struct]
    34  DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events);  [argument]
   201  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in sh_pmu_stop()
   219  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in sh_pmu_start()
   236  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in sh_pmu_del()
   246  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in sh_pmu_add()
   336  struct cpu_hw_events *cpuhw = &per_cpu(cpu_hw_events, cpu);  in sh_pmu_prepare_cpu()
   338  memset(cpuhw, 0, sizeof(struct cpu_hw_events));  in sh_pmu_prepare_cpu()
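Every entry in this listing follows the same basic shape: the architecture defines one cpu_hw_events instance per CPU with DEFINE_PER_CPU(), the PMU fast paths (add/del/start/stop, interrupt handlers) take a pointer to the local copy with this_cpu_ptr(), and CPU-hotplug or debug code addresses a specific CPU's copy with &per_cpu(..., cpu). A minimal sketch of that pattern follows; the struct layout and the function names are illustrative only, since every architecture's real cpu_hw_events differs (later sketches in this listing reuse these field names).

```c
#include <linux/percpu.h>
#include <linux/perf_event.h>
#include <linux/bitops.h>
#include <linux/string.h>

struct example_nb;			/* stand-in for arch-specific shared state */

/* Purely illustrative bookkeeping; no architecture's real layout. */
struct cpu_hw_events {
	struct perf_event	*events[8];	/* hypothetical counter slots       */
	unsigned long		used_mask;	/* which slots are occupied         */
	int			n_events;	/* events currently scheduled       */
	int			n_txn_start;	/* snapshot taken by start_txn()    */
	unsigned int		txn_flags;	/* PERF_PMU_TXN_* currently open    */
	struct example_nb	*nb;		/* shared state, see the AMD sketch */
};

static DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events);

/* Fast path (add/del/start/stop all look like this): runs on the CPU that
 * owns the counters, so the local copy is reached with this_cpu_ptr(). */
static void example_pmu_del(struct perf_event *event, int flags)
{
	struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);
	int idx = event->hw.idx;

	cpuc->events[idx] = NULL;
	__clear_bit(idx, &cpuc->used_mask);
	cpuc->n_events--;
}

/* Hotplug path: may run on another CPU, so name the target explicitly. */
static int example_pmu_prepare_cpu(unsigned int cpu)
{
	struct cpu_hw_events *cpuhw = &per_cpu(cpu_hw_events, cpu);

	memset(cpuhw, 0, sizeof(struct cpu_hw_events));
	return 0;
}
```

The same split shows up in every file below: this_cpu_ptr() in the event paths, per_cpu() only in hotplug, setup, and debug code.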
|
/linux/arch/x86/events/intel/
ds.c
   631  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_pmu_disable_bts()
   648  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_pmu_drain_bts_buffer()
  1010  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_pmu_pebs_sched_task()
  1044  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in adaptive_pebs_record_size_update()
  1162  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_pmu_pebs_add()
  1177  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_pmu_pebs_via_pt_disable()
  1188  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_pmu_pebs_via_pt_enable()
  1213  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_pmu_pebs_enable()
  1254  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_pmu_pebs_del()
  1269  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_pmu_pebs_disable()
  [all …]
|
lbr.c
   190  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in __intel_pmu_lbr_enable()
   259  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_pmu_lbr_reset()
   431  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_pmu_lbr_restore()
   495  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in __intel_pmu_lbr_restore()
   523  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_pmu_lbr_save()
   572  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in __intel_pmu_lbr_save()
   612  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_pmu_lbr_sched_task()
   649  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_pmu_lbr_add()
   730  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_pmu_lbr_del()
   752  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in vlbr_exclude_host()
  [all …]
|
core.c
  2150  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in __intel_pmu_disable_all()
  2167  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in __intel_pmu_enable_all()
  2195  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in __intel_pmu_snapshot_branch_stack()
  2248  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_pmu_nhm_workaround()
  2334  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_tfa_pmu_enable_all()
  2367  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_set_masks()
  2379  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_clear_masks()
  2556  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in update_saved_topdown_regs()
  2581  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_update_topdown_event()
  2655  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in intel_pmu_read_topdown_event()
  [all …]
|
bts.c
   147  struct debug_store *ds = per_cpu(cpu_hw_events, cpu).ds;  in bts_config_buffer()
   186  struct debug_store *ds = per_cpu(cpu_hw_events, cpu).ds;  in bts_update()
   262  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in bts_event_start()
   307  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in bts_event_stop()
   452  struct debug_store *ds = this_cpu_ptr(&cpu_hw_events)->ds;  in intel_bts_interrupt()
   522  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in bts_event_add()
|
knc.c
   216  struct cpu_hw_events *cpuc;  in knc_pmu_handle_irq()
   221  cpuc = this_cpu_ptr(&cpu_hw_events);  in knc_pmu_handle_irq()
|
p4.c
   919  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in p4_pmu_disable_all()
   998  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in p4_pmu_enable_all()
  1012  struct cpu_hw_events *cpuc;  in p4_pmu_handle_irq()
  1018  cpuc = this_cpu_ptr(&cpu_hw_events);  in p4_pmu_handle_irq()
  1217  static int p4_pmu_schedule_events(struct cpu_hw_events *cpuc, int n, int *assign)  in p4_pmu_schedule_events()
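knc.c and p4.c (and zhaoxin/core.c further down) show the interrupt-handler variant of the same access: the pointer is declared at the top of the handler and only resolved with this_cpu_ptr() once the handler is committed to scanning the counters. A hedged sketch of that shape, reusing the illustrative cpu_hw_events fields from the first code block; the handler name and the fixed slot count are made up:

```c
/* Sketch of the PMI-handler shape seen in knc_pmu_handle_irq() and
 * p4_pmu_handle_irq(); all hardware-specific work is elided. */
static int example_pmu_handle_irq(struct pt_regs *regs)
{
	struct cpu_hw_events *cpuc;
	int handled = 0;
	int idx;

	cpuc = this_cpu_ptr(&cpu_hw_events);

	for (idx = 0; idx < 8; idx++) {		/* 8 slots is illustrative */
		struct perf_event *event = cpuc->events[idx];

		if (!test_bit(idx, &cpuc->used_mask) || !event)
			continue;

		/* ... read the counter, check for overflow, emit a sample ... */
		handled++;
	}

	return handled;
}
```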
|
/linux/arch/x86/events/amd/
core.c
   354  static inline int amd_has_nb(struct cpu_hw_events *cpuc)  in amd_has_nb()
   515  struct cpu_hw_events *cpuc = &per_cpu(cpu_hw_events, cpu);  in amd_pmu_cpu_prepare()
   531  struct cpu_hw_events *cpuc = &per_cpu(cpu_hw_events, cpu);  in amd_pmu_cpu_starting()
   545  nb = per_cpu(cpu_hw_events, i).amd_nb;  in amd_pmu_cpu_starting()
   562  struct cpu_hw_events *cpuhw;  in amd_pmu_cpu_dead()
   567  cpuhw = &per_cpu(cpu_hw_events, cpu);  in amd_pmu_cpu_dead()
   610  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in amd_pmu_disable_all()
   696  amd_get_event_constraints(struct cpu_hw_events *cpuc, int idx,  in amd_get_event_constraints()
   708  static void amd_put_event_constraints(struct cpu_hw_events *cpuc,  in amd_put_event_constraints()
  1028  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in amd_pmu_enable_virt()
  [all …]
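The hit at line 545 is the one place in this listing where a CPU reads other CPUs' copies outside plain initialisation: amd_pmu_cpu_starting() looks at sibling CPUs' cpu_hw_events via per_cpu(cpu_hw_events, i) so that CPUs on the same northbridge can share one amd_nb structure. A very rough sketch of that idea, reusing the illustrative nb field from the first code block; example_same_node() and struct example_nb are hypothetical stand-ins, not the real amd_nb bookkeeping:

```c
#include <linux/cpumask.h>

static bool example_same_node(unsigned int a, unsigned int b);

/* Hypothetical sibling-sharing sketch in the spirit of amd_pmu_cpu_starting(). */
static int example_pmu_cpu_starting(unsigned int cpu)
{
	struct cpu_hw_events *cpuc = &per_cpu(cpu_hw_events, cpu);
	int i;

	for_each_online_cpu(i) {
		struct example_nb *nb = per_cpu(cpu_hw_events, i).nb;

		if (i == cpu || !nb || !example_same_node(cpu, i))
			continue;

		cpuc->nb = nb;		/* reuse the sibling's shared structure */
		break;
	}

	return 0;
}
```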
|
/linux/arch/powerpc/perf/
core-fsl-emb.c
    19  struct cpu_hw_events {  [struct]
    25  static DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events);  [argument]
   192  struct cpu_hw_events *cpuhw;  in fsl_emb_pmu_disable()
   231  struct cpu_hw_events *cpuhw;  in fsl_emb_pmu_enable()
   278  struct cpu_hw_events *cpuhw;  in fsl_emb_pmu_add()
   285  cpuhw = &get_cpu_var(cpu_hw_events);  in fsl_emb_pmu_add()
   331  put_cpu_var(cpu_hw_events);  in fsl_emb_pmu_add()
   339  struct cpu_hw_events *cpuhw;  in fsl_emb_pmu_del()
   371  put_cpu_var(cpu_hw_events);  in fsl_emb_pmu_del()
   645  struct cpu_hw_events *cpuhw = this_cpu_ptr(&cpu_hw_events);  in perf_event_interrupt()
  [all …]
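core-fsl-emb.c is the only file here that reaches its per-CPU state with the get_cpu_var()/put_cpu_var() pair instead of this_cpu_ptr(): get_cpu_var() disables preemption for as long as the local cpu_hw_events is being modified, and put_cpu_var() re-enables it. A sketch of that bracketing, with the slot-allocation details elided and the function name made up:

```c
#include <linux/errno.h>
#include <linux/kernel.h>

/* Sketch of the preemption-safe bracketing visible in fsl_emb_pmu_add()
 * (lines 285 and 331); the body between the two calls is illustrative. */
static int example_pmu_add(struct perf_event *event, int flags)
{
	struct cpu_hw_events *cpuhw;
	int ret = -EAGAIN;

	/* get_cpu_var() disables preemption and yields this CPU's copy. */
	cpuhw = &get_cpu_var(cpu_hw_events);

	if (cpuhw->n_events < ARRAY_SIZE(cpuhw->events)) {
		cpuhw->events[cpuhw->n_events++] = event;
		/* ... program the hardware counter ... */
		ret = 0;
	}

	put_cpu_var(cpu_hw_events);	/* re-enables preemption */
	return ret;
}
```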
|
core-book3s.c
    31  struct cpu_hw_events {  [struct]
    63  static DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events);  [argument]
   156  struct cpu_hw_events *cpuhw = this_cpu_ptr(&cpu_hw_events);  in get_pmcs_ext_regs()
   415  struct cpu_hw_events *cpuhw = this_cpu_ptr(&cpu_hw_events);  in power_pmu_bhrb_enable()
   431  struct cpu_hw_events *cpuhw = this_cpu_ptr(&cpu_hw_events);  in power_pmu_bhrb_disable()
  1265  struct cpu_hw_events *cpuhw;  in power_pmu_disable()
  1354  struct cpu_hw_events *cpuhw;  in power_pmu_enable()
  1748  struct cpu_hw_events *cpuhw = this_cpu_ptr(&cpu_hw_events);  in power_pmu_start_txn()
  1767  struct cpu_hw_events *cpuhw = this_cpu_ptr(&cpu_hw_events);  in power_pmu_cancel_txn()
  2304  struct cpu_hw_events *cpuhw = this_cpu_ptr(&cpu_hw_events);  in __perf_event_interrupt()
  [all …]
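The power_pmu_start_txn()/power_pmu_cancel_txn() hits (and the matching sparc_pmu_*_txn() hits below) show why cpu_hw_events also carries transaction state: a group add is staged against the per-CPU structure and rolled back if scheduling fails. A hedged sketch, using the illustrative n_events/n_txn_start/txn_flags fields from the first code block; the real powerpc and sparc versions do more bookkeeping than this:

```c
/* Stage an ADD transaction in per-CPU state so a failed group can be undone. */
static void example_pmu_start_txn(struct pmu *pmu, unsigned int txn_flags)
{
	struct cpu_hw_events *cpuhw = this_cpu_ptr(&cpu_hw_events);

	cpuhw->txn_flags = txn_flags;
	if (txn_flags & PERF_PMU_TXN_ADD)
		cpuhw->n_txn_start = cpuhw->n_events;	/* remember where the group begins */
}

static void example_pmu_cancel_txn(struct pmu *pmu)
{
	struct cpu_hw_events *cpuhw = this_cpu_ptr(&cpu_hw_events);

	if (cpuhw->txn_flags & PERF_PMU_TXN_ADD)
		cpuhw->n_events = cpuhw->n_txn_start;	/* drop the partially added group */
	cpuhw->txn_flags = 0;
}
```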
|
/linux/arch/x86/events/
core.c
    50  DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events) = {
   677  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in x86_pmu_disable_all()
   717  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in x86_pmu_disable()
   734  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in x86_pmu_enable_all()
   764  struct cpu_hw_events *cpuc = &per_cpu(cpu_hw_events, cpu);  in x86_get_pmu()
  1283  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in x86_pmu_enable()
  1448  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in x86_pmu_add()
  1506  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in x86_pmu_start()
  1533  struct cpu_hw_events *cpuc = &per_cpu(cpu_hw_events, cpu);  in perf_event_print_debug()
  1593  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in x86_pmu_stop()
  [all …]
|
perf_event.h
   227  struct cpu_hw_events {  [struct]
   754  (*get_event_constraints)(struct cpu_hw_events *cpuc,
   758  void (*put_event_constraints)(struct cpu_hw_events *cpuc,
   761  void (*start_scheduling)(struct cpu_hw_events *cpuc);
   765  void (*stop_scheduling)(struct cpu_hw_events *cpuc);
   857  void (*lbr_read)(struct cpu_hw_events *cpuc);
  1039  DECLARE_PER_CPU(struct cpu_hw_events, cpu_hw_events);
  1267  x86_get_event_constraints(struct cpu_hw_events *cpuc, int idx,
  1271  extern void intel_cpuc_finish(struct cpu_hw_events *cpuc);
  1371  void intel_pmu_lbr_read_32(struct cpu_hw_events *cpuc);
  [all …]
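On x86 the structure is shared by several vendor drivers (Intel, AMD, Zhaoxin, KNC, P4), so perf_event.h both exports the per-CPU variable with DECLARE_PER_CPU() and threads struct cpu_hw_events * through a table of vendor callbacks. A simplified sketch of that arrangement; example_pmu_ops is a stand-in for the kernel's real driver-description struct, and the callback signatures are abridged from the hits above:

```c
/* In a shared header: export the per-CPU variable defined in events/core.c
 * and let vendor code hook the scheduling steps with cpu_hw_events access. */
struct cpu_hw_events;		/* the full definition appears earlier in the real header */
struct event_constraint;
struct perf_event;

DECLARE_PER_CPU(struct cpu_hw_events, cpu_hw_events);

struct example_pmu_ops {
	struct event_constraint *
		(*get_event_constraints)(struct cpu_hw_events *cpuc, int idx,
					 struct perf_event *event);
	void	(*put_event_constraints)(struct cpu_hw_events *cpuc,
					 struct perf_event *event);
	void	(*start_scheduling)(struct cpu_hw_events *cpuc);
	void	(*stop_scheduling)(struct cpu_hw_events *cpuc);
	void	(*lbr_read)(struct cpu_hw_events *cpuc);
};
```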
|
/linux/arch/alpha/kernel/
perf_event.c
    34  struct cpu_hw_events {  [struct]
    53  DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events);  [argument]
   391  static void maybe_change_configuration(struct cpu_hw_events *cpuc)  in maybe_change_configuration()
   435  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in alpha_pmu_add()
   487  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in alpha_pmu_del()
   535  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in alpha_pmu_stop()
   555  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in alpha_pmu_start()
   722  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in alpha_pmu_enable()
   748  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in alpha_pmu_disable()
   807  struct cpu_hw_events *cpuc;  in alpha_perf_event_irq_handler()
  [all …]
|
/linux/arch/sparc/kernel/
perf_event.c
  1031  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in sparc_pmu_enable()
  1049  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in sparc_pmu_disable()
  1083  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in sparc_pmu_start()
  1100  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in sparc_pmu_stop()
  1116  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in sparc_pmu_del()
  1151  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in sparc_pmu_read()
  1163  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in perf_stop_nmi_watchdog()
  1374  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in sparc_pmu_add()
  1514  struct cpu_hw_events *cpuhw = this_cpu_ptr(&cpu_hw_events);  in sparc_pmu_start_txn()
  1532  struct cpu_hw_events *cpuhw = this_cpu_ptr(&cpu_hw_events);  in sparc_pmu_cancel_txn()
  [all …]
|
/linux/arch/riscv/kernel/
perf_event.c
    36  static DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events);
   299  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in riscv_pmu_add()
   330  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in riscv_pmu_del()
|
/linux/arch/mips/kernel/
perf_event_mipsxx.c
    31  struct cpu_hw_events {  [struct]
    48  DEFINE_PER_CPU(struct cpu_hw_events, cpu_hw_events) = {  [argument]
   314  static int mipsxx_pmu_alloc_counter(struct cpu_hw_events *cpuc,  in mipsxx_pmu_alloc_counter()
   351  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in mipsxx_pmu_enable_event()
   397  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in mipsxx_pmu_disable_event()
   501  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in mipspmu_add()
   537  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in mipspmu_del()
   763  struct cpu_hw_events fake_cpuc;  in validate_group()
  1548  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in pause_local_counters()
  1564  struct cpu_hw_events *cpuc = this_cpu_ptr(&cpu_hw_events);  in resume_local_counters()
  [all …]
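The hit at line 763 is worth calling out: validate_group() builds a throwaway cpu_hw_events on the stack and dry-runs counter allocation against it, so checking whether an event group can ever be scheduled never disturbs the real per-CPU state. A hedged sketch of that idea; example_try_to_schedule() is a hypothetical helper standing in for the arch-specific scheduling check, not the MIPS code's actual validate_event():

```c
#include <linux/errno.h>

static int example_try_to_schedule(struct cpu_hw_events *cpuc,
				   struct perf_event *event);

/* Dry-run group validation against a stack-allocated cpu_hw_events. */
static int example_validate_group(struct perf_event *event)
{
	struct perf_event *sibling, *leader = event->group_leader;
	struct cpu_hw_events fake_cpuc;

	memset(&fake_cpuc, 0, sizeof(fake_cpuc));

	if (example_try_to_schedule(&fake_cpuc, leader))
		return -EINVAL;

	for_each_sibling_event(sibling, leader) {
		if (example_try_to_schedule(&fake_cpuc, sibling))
			return -EINVAL;
	}

	return example_try_to_schedule(&fake_cpuc, event) ? -EINVAL : 0;
}
```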
|
/linux/arch/riscv/include/asm/
perf_event.h
    45  struct cpu_hw_events {  [struct]
|
/linux/arch/x86/events/zhaoxin/
core.c
   357  struct cpu_hw_events *cpuc;  in zhaoxin_pmu_handle_irq()
   362  cpuc = this_cpu_ptr(&cpu_hw_events);  in zhaoxin_pmu_handle_irq()
   422  zhaoxin_get_event_constraints(struct cpu_hw_events *cpuc, int idx,  in zhaoxin_get_event_constraints()
|
/linux/arch/nds32/kernel/
perf_event_cpu.c
    33  static DEFINE_PER_CPU(struct pmu_hw_events, cpu_hw_events);
  1068  return this_cpu_ptr(&cpu_hw_events);  in cpu_pmu_get_cpu_events()
  1109  struct pmu_hw_events *events = &per_cpu(cpu_hw_events, cpu);  in cpu_pmu_init()
|