| /linux/drivers/net/ethernet/microsoft/mana/ |
| A D | hw_channel.c |
    13  down(&hwc->sema);  in mana_hwc_get_msg_index()
   369  hwc_cq->hwc = hwc;  in mana_hwc_create_cq()
   483  hwc_wq->hwc = hwc;  in mana_hwc_create_wq()
   646  mana_hwc_tx_event_handler, hwc, &hwc->cq);  in mana_hwc_init_queues()
   653  hwc->cq, &hwc->rxq);  in mana_hwc_init_queues()
   660  hwc->cq, &hwc->txq);  in mana_hwc_init_queues()
   683  hwc = kzalloc(sizeof(*hwc), GFP_KERNEL);  in mana_hwc_create_channel()
   684  if (!hwc)  in mana_hwc_create_channel()
   735  if (!hwc)  in mana_hwc_destroy_channel()
   750  mana_hwc_destroy_wq(hwc, hwc->txq);  in mana_hwc_destroy_channel()
  [all …]
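The `down(&hwc->sema)` hit at line 13 is the front half of a recurring slot-allocation pattern: a counting semaphore, sized to the number of message slots, gates callers before they claim a free index. A minimal user-space sketch of that pattern, with POSIX semaphores standing in for the kernel's down()/up(); the slot bookkeeping and names here are hypothetical, not MANA's actual structures:

```c
#include <pthread.h>
#include <semaphore.h>
#include <stdbool.h>

#define NUM_SLOTS 16

static sem_t slot_sema;                 /* counts free slots */
static pthread_mutex_t slot_lock = PTHREAD_MUTEX_INITIALIZER;
static bool slot_busy[NUM_SLOTS];

static void slots_init(void)
{
	sem_init(&slot_sema, 0, NUM_SLOTS);
}

static int slot_get(void)               /* cf. mana_hwc_get_msg_index() */
{
	int i;

	sem_wait(&slot_sema);           /* kernel: down(&hwc->sema) */
	pthread_mutex_lock(&slot_lock);
	for (i = 0; i < NUM_SLOTS; i++) {
		if (!slot_busy[i]) {
			slot_busy[i] = true;  /* semaphore guarantees one exists */
			break;
		}
	}
	pthread_mutex_unlock(&slot_lock);
	return i;
}

static void slot_put(int i)
{
	pthread_mutex_lock(&slot_lock);
	slot_busy[i] = false;
	pthread_mutex_unlock(&slot_lock);
	sem_post(&slot_sema);           /* kernel: up(&hwc->sema) */
}
```

Because the semaphore never admits more callers than there are slots, the linear scan always finds a free entry and no retry loop is needed.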
|
| /linux/drivers/net/ethernet/mellanox/mlx5/core/sf/ |
| A D | hw_table.c |
    48  struct mlx5_sf_hwc_table *hwc;  in mlx5_sf_sw_to_hw_id() local
    65  if (table->hwc[i].max_fn &&  in mlx5_sf_table_fn_to_hwc()
    67  fn_id < (table->hwc[i].start_fn_id + table->hwc[i].max_fn))  in mlx5_sf_table_fn_to_hwc()
    68  return &table->hwc[i];  in mlx5_sf_table_fn_to_hwc()
    81  if (!hwc->sfs)  in mlx5_sf_hw_table_id_alloc()
    90  if (hwc->sfs[i].allocated && hwc->sfs[i].usr_sfnum == usr_sfnum)  in mlx5_sf_hw_table_id_alloc()
   235  hwc->sfs = sfs;  in mlx5_sf_hw_table_hwc_init()
   236  hwc->max_fn = max_fn;  in mlx5_sf_hw_table_hwc_init()
   243  kfree(hwc->sfs);  in mlx5_sf_hw_table_hwc_cleanup()
   322  if (!hwc)  in mlx5_sf_hw_vhca_event()
  [all …]
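The matches at lines 65–68 are a linear range lookup: each hwc table entry covers function ids [start_fn_id, start_fn_id + max_fn), and a zero max_fn marks an unused entry. A sketch with stand-in types; the lower-bound check is an assumption, since the listing only shows the upper bound:

```c
#include <stddef.h>

struct hwc_table {                 /* stand-in for struct mlx5_sf_hwc_table */
	unsigned int start_fn_id;
	unsigned int max_fn;       /* 0 means the entry is unused */
};

static struct hwc_table *fn_to_hwc(struct hwc_table *tbl, size_t n,
				   unsigned int fn_id)
{
	for (size_t i = 0; i < n; i++) {
		if (tbl[i].max_fn &&
		    fn_id >= tbl[i].start_fn_id &&
		    fn_id < tbl[i].start_fn_id + tbl[i].max_fn)
			return &tbl[i];    /* fn_id falls in this range */
	}
	return NULL;
}
```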
|
| /linux/drivers/iio/buffer/ |
| A D | industrialio-hw-consumer.c |
    86  struct iio_hw_consumer *hwc;  in iio_hw_consumer_alloc() local
    90  hwc = kzalloc(sizeof(*hwc), GFP_KERNEL);  in iio_hw_consumer_alloc()
    91  if (!hwc)  in iio_hw_consumer_alloc()
    94  INIT_LIST_HEAD(&hwc->buffers);  in iio_hw_consumer_alloc()
    97  if (IS_ERR(hwc->channels)) {  in iio_hw_consumer_alloc()
    98  ret = PTR_ERR(hwc->channels);  in iio_hw_consumer_alloc()
   102  chan = &hwc->channels[0];  in iio_hw_consumer_alloc()
   113  return hwc;  in iio_hw_consumer_alloc()
   118  iio_channel_release_all(hwc->channels);  in iio_hw_consumer_alloc()
   120  kfree(hwc);  in iio_hw_consumer_alloc()
  [all …]
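The iio_hw_consumer_alloc() hits trace the canonical kernel allocate-then-unwind shape: zero-allocate the object, acquire resources in order, and on failure release them in reverse via goto labels. A user-space sketch of that shape, with hypothetical stand-in types and helpers rather than the IIO API:

```c
#include <stdlib.h>

struct hw_consumer {
	void *channels;
};

static void *channels_get(void)        { return malloc(16); }  /* hypothetical */
static void  channels_release(void *c) { free(c); }            /* hypothetical */

struct hw_consumer *hw_consumer_alloc(void)
{
	struct hw_consumer *hwc;

	hwc = calloc(1, sizeof(*hwc));         /* cf. kzalloc(..., GFP_KERNEL) */
	if (!hwc)
		return NULL;

	hwc->channels = channels_get();        /* acquire in forward order */
	if (!hwc->channels)
		goto err_free_hwc;

	return hwc;

err_free_hwc:                                  /* unwind in reverse order */
	free(hwc);
	return NULL;
}
```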
|
| /linux/arch/alpha/kernel/ |
| A D | perf_event.c |
   414  int idx = hwc->idx;  in maybe_change_configuration()
   528  alpha_perf_event_update(event, hwc, hwc->idx, 0);  in alpha_pmu_read()
   543  alpha_perf_event_update(event, hwc, hwc->idx, 0);  in alpha_pmu_stop()
   562  alpha_perf_event_set_period(event, hwc, hwc->idx);  in alpha_pmu_start()
   565  hwc->state = 0;  in alpha_pmu_start()
   642  hwc->event_base = ev;  in __hw_perf_event_init()
   663  hwc->config_base = 0;  in __hw_perf_event_init()
   664  hwc->idx = PMC_NO_INDEX;  in __hw_perf_event_init()
   680  hwc->last_period = hwc->sample_period;  in __hw_perf_event_init()
   681  local64_set(&hwc->period_left, hwc->sample_period);  in __hw_perf_event_init()
  [all …]
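Lines 680–681 show the sampling-period seeding idiom that recurs throughout this listing (arc, arm, nds32, mips, s390 and x86 entries below): copy the requested sample_period into last_period and prime the period_left down-counter with it. A stand-alone sketch with a stand-in struct; in the kernel these fields live in struct hw_perf_event and period_left is a local64_t updated atomically:

```c
#include <stdint.h>

struct hw_event {                  /* stand-in for struct hw_perf_event */
	uint64_t sample_period;    /* requested events per sample */
	uint64_t last_period;      /* period used for the last sample */
	int64_t  period_left;      /* kernel: local64_t */
};

static void init_period(struct hw_event *hwc)
{
	hwc->last_period = hwc->sample_period;
	hwc->period_left = (int64_t)hwc->sample_period;  /* cf. local64_set() */
}
```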
|
| /linux/arch/s390/include/asm/ |
| A D | perf_event.h |
    68  #define OVERFLOW_REG(hwc) ((hwc)->extra_reg.config)  argument
    69  #define SFB_ALLOC_REG(hwc) ((hwc)->extra_reg.alloc)  argument
    70  #define TEAR_REG(hwc) ((hwc)->last_tag)  argument
    71  #define SAMPL_RATE(hwc) ((hwc)->event_base)  argument
    72  #define SAMPL_FLAGS(hwc) ((hwc)->config_base)  argument
    73  #define SAMPL_DIAG_MODE(hwc) (SAMPL_FLAGS(hwc) & PERF_CPUM_SF_DIAG_MODE)  argument
    74  #define SDB_FULL_BLOCKS(hwc) (SAMPL_FLAGS(hwc) & PERF_CPUM_SF_FULL_BLOCKS)  argument
    75  #define SAMPLE_FREQ_MODE(hwc) (SAMPL_FLAGS(hwc) & PERF_CPUM_SF_FREQ_MODE)  argument
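These macros are an aliasing trick: the s390 sampling facility stashes its own state (sampling rate, flags, overflow counts) in generic struct hw_perf_event fields it does not otherwise use, and hides the reuse behind accessors. A compilable sketch of the same trick with a stand-in struct and a hypothetical flag bit:

```c
#include <stdio.h>

struct hw_event {                       /* stand-in for struct hw_perf_event */
	unsigned long config_base;      /* reused to hold: sampling flags */
	unsigned long event_base;       /* reused to hold: sampling rate  */
};

#define SAMPL_RATE(hwc)      ((hwc)->event_base)
#define SAMPL_FLAGS(hwc)     ((hwc)->config_base)
#define DIAG_MODE_FLAG       0x1UL      /* hypothetical flag bit */
#define SAMPL_DIAG_MODE(hwc) (SAMPL_FLAGS(hwc) & DIAG_MODE_FLAG)

int main(void)
{
	struct hw_event hwc = { .config_base = DIAG_MODE_FLAG,
				.event_base  = 4000 };

	printf("rate=%lu diag=%lu\n", SAMPL_RATE(&hwc), SAMPL_DIAG_MODE(&hwc));
	return 0;
}
```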
|
| /linux/arch/x86/events/amd/ |
| A D | ibs.c |
   117  hwc->last_period = period;  in perf_event_set_period()
   124  hwc->last_period = period;  in perf_event_set_period()
   273  if (hwc->sample_period) {  in perf_ibs_init()
   285  if (!hwc->sample_period)  in perf_ibs_init()
   294  if (!hwc->sample_period)  in perf_ibs_init()
   301  hwc->last_period = hwc->sample_period;  in perf_ibs_init()
   302  local64_set(&hwc->period_left, hwc->sample_period);  in perf_ibs_init()
   305  hwc->config = config;  in perf_ibs_init()
   412  hwc->state = 0;  in perf_ibs_start()
   615  hwc = &event->hw;  in perf_ibs_handle_irq()
  [all …]
|
| A D | iommu.c |
   241  u8 bank = hwc->iommu_bank;  in perf_iommu_enable_event()
   242  u8 cntr = hwc->iommu_cntr;  in perf_iommu_enable_event()
   245  reg = GET_CSOURCE(hwc);  in perf_iommu_enable_event()
   248  reg = GET_DEVID_MASK(hwc);  in perf_iommu_enable_event()
   254  reg = GET_PASID_MASK(hwc);  in perf_iommu_enable_event()
   260  reg = GET_DOMID_MASK(hwc);  in perf_iommu_enable_event()
   273  amd_iommu_pc_set_reg(iommu, hwc->iommu_bank, hwc->iommu_cntr,  in perf_iommu_disable_event()
   285  hwc->state = 0;  in perf_iommu_start()
   302  amd_iommu_pc_set_reg(iommu, hwc->iommu_bank, hwc->iommu_cntr,  in perf_iommu_start()
   315  if (amd_iommu_pc_get_reg(iommu, hwc->iommu_bank, hwc->iommu_cntr,  in perf_iommu_read()
  [all …]
|
| A D | uncore.c |
   106  wrmsrl(hwc->event_base, (u64)local64_read(&hwc->prev_count));  in amd_uncore_start()
   108  hwc->state = 0;  in amd_uncore_start()
   117  wrmsrl(hwc->config_base, hwc->config);  in amd_uncore_stop()
   133  if (hwc->idx != -1 && uncore->events[hwc->idx] == event)  in amd_uncore_add()
   138  hwc->idx = i;  in amd_uncore_add()
   144  hwc->idx = -1;  in amd_uncore_add()
   156  hwc->config_base = uncore->msr_base + (2 * hwc->idx);  in amd_uncore_add()
   157  hwc->event_base = uncore->msr_base + 1 + (2 * hwc->idx);  in amd_uncore_add()
   158  hwc->event_base_rdpmc = uncore->rdpmc_base + hwc->idx;  in amd_uncore_add()
   180  hwc->idx = -1;  in amd_uncore_del()
  [all …]
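Lines 156–158 encode the uncore register layout in address arithmetic: control and counter MSRs are interleaved, so counter i's control register sits at msr_base + 2*i and its count register immediately after, while the RDPMC index is a flat rdpmc_base + i. The same arithmetic as stand-alone helpers, a sketch rather than the driver's code:

```c
#include <stdint.h>

/* cf. amd_uncore_add(): counter idx owns two interleaved MSRs */
static inline uint32_t uncore_config_msr(uint32_t msr_base, uint32_t idx)
{
	return msr_base + 2 * idx;         /* control/config register */
}

static inline uint32_t uncore_event_msr(uint32_t msr_base, uint32_t idx)
{
	return msr_base + 2 * idx + 1;     /* count register, right after */
}

static inline uint32_t uncore_rdpmc_idx(uint32_t rdpmc_base, uint32_t idx)
{
	return rdpmc_base + idx;           /* flat RDPMC numbering */
}
```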
|
| /linux/arch/arc/kernel/ |
| A D | perf_event.c |
   174  hwc->last_period = hwc->sample_period;  in arc_pmu_event_init()
   175  local64_set(&hwc->period_left, hwc->sample_period);  in arc_pmu_event_init()
   178  hwc->config = 0;  in arc_pmu_event_init()
   248  int idx = hwc->idx;  in arc_pmu_event_set_period()
   256  hwc->last_period = period;  in arc_pmu_event_set_period()
   292  int idx = hwc->idx;  in arc_pmu_start()
   300  hwc->state = 0;  in arc_pmu_start()
   317  int idx = hwc->idx;  in arc_pmu_stop()
   364  int idx = hwc->idx;  in arc_pmu_add()
   371  hwc->idx = idx;  in arc_pmu_add()
  [all …]
|
| /linux/drivers/gpu/drm/amd/amdgpu/ |
| A D | amdgpu_pmu.c |
   237  hwc->state = 0;  in amdgpu_perf_start()
   239  switch (hwc->config_base) {  in amdgpu_perf_start()
   249  hwc->idx = target_cntr;  in amdgpu_perf_start()
   253  hwc->idx, 0);  in amdgpu_perf_start()
   278  hwc->config, hwc->idx, &count);  in amdgpu_perf_read()
   300  switch (hwc->config_base) {  in amdgpu_perf_stop()
   303  pe->adev->df.funcs->pmc_stop(pe->adev, hwc->config, hwc->idx,  in amdgpu_perf_stop()
   334  hwc->config_base = (hwc->config >>  in amdgpu_perf_add()
   342  switch (hwc->config_base) {  in amdgpu_perf_add()
   351  hwc->idx = target_cntr;  in amdgpu_perf_add()
  [all …]
|
| /linux/drivers/perf/hisilicon/ |
| A D | hisi_uncore_pmu.c |
   187  struct hw_perf_event *hwc = &event->hw;  in hisi_uncore_pmu_event_init() local
   226  hwc->idx = -1;  in hisi_uncore_pmu_event_init()
   227  hwc->config_base = event->attr.config;  in hisi_uncore_pmu_event_init()
   243  struct hw_perf_event *hwc = &event->hw;  in hisi_uncore_pmu_enable_event() local
   261  struct hw_perf_event *hwc = &event->hw;  in hisi_uncore_pmu_disable_event() local
   273  struct hw_perf_event *hwc = &event->hw;  in hisi_uncore_pmu_set_event_period() local
   284  local64_set(&hwc->prev_count, val);  in hisi_uncore_pmu_set_event_period()
   320  hwc->state = 0;  in hisi_uncore_pmu_start()
   340  hwc->state |= PERF_HES_STOPPED;  in hisi_uncore_pmu_stop()
   342  if (hwc->state & PERF_HES_UPTODATE)  in hisi_uncore_pmu_stop()
  [all …]
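Lines 340–342 are the standard perf stop() protocol seen across the drivers in this listing: mark the event PERF_HES_STOPPED, then fold in the final hardware count exactly once, guarded by PERF_HES_UPTODATE. A sketch of that state machine; the flag values are chosen to mirror the kernel's, but the types and update hook are stand-ins:

```c
#define PERF_HES_STOPPED  0x1
#define PERF_HES_UPTODATE 0x2

struct hw_event { int state; };    /* stand-in for struct hw_perf_event */

static void event_update(struct hw_event *hwc)
{
	(void)hwc;                 /* would read the HW counter delta here */
}

static void pmu_stop(struct hw_event *hwc)
{
	hwc->state |= PERF_HES_STOPPED;

	if (hwc->state & PERF_HES_UPTODATE)
		return;                    /* final count already folded in */

	event_update(hwc);
	hwc->state |= PERF_HES_UPTODATE;
}
```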
|
| A D | hisi_uncore_ddrc_pmu.c |
    59  #define GET_DDRC_EVENTID(hwc) (hwc->config_base & 0x7)  argument
    96  struct hw_perf_event *hwc)  in hisi_ddrc_pmu_v2_read_counter() argument
   212  val |= 1 << hwc->idx;  in hisi_ddrc_pmu_v2_enable_counter()
   222  val &= ~(1 << hwc->idx);  in hisi_ddrc_pmu_v2_disable_counter()
   227  struct hw_perf_event *hwc)  in hisi_ddrc_pmu_v1_enable_counter_int() argument
   233  val &= ~(1 << hwc->idx);  in hisi_ddrc_pmu_v1_enable_counter_int()
   244  val |= 1 << hwc->idx;  in hisi_ddrc_pmu_v1_disable_counter_int()
   249  struct hw_perf_event *hwc)  in hisi_ddrc_pmu_v2_enable_counter_int() argument
   254  val &= ~(1 << hwc->idx);  in hisi_ddrc_pmu_v2_enable_counter_int()
   259  struct hw_perf_event *hwc)  in hisi_ddrc_pmu_v2_disable_counter_int() argument
  [all …]
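The enable/disable pairs above are read-modify-write updates where each counter owns bit hwc->idx of a shared control register; note that the *_counter_int variants invert the sense because they poke an interrupt mask (clearing the bit unmasks the interrupt, lines 233 and 254). A sketch of the non-inverted counter enable/disable with stand-in register accessors:

```c
#include <stdint.h>

static uint32_t ctrl_reg;                  /* stand-in for an MMIO register */
static uint32_t reg_read(void)        { return ctrl_reg; }
static void     reg_write(uint32_t v) { ctrl_reg = v; }

static void counter_enable(int idx)
{
	uint32_t val = reg_read();
	val |= 1U << idx;                  /* set this counter's bit */
	reg_write(val);
}

static void counter_disable(int idx)
{
	uint32_t val = reg_read();
	val &= ~(1U << idx);               /* clear this counter's bit */
	reg_write(val);
}
```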
|
| /linux/arch/xtensa/kernel/ |
| A D | perf_event.c |
   163  s64 period = hwc->sample_period;  in xtensa_perf_event_set_period()
   169  hwc->last_period = period;  in xtensa_perf_event_set_period()
   174  hwc->last_period = period;  in xtensa_perf_event_set_period()
   181  local64_set(&hwc->prev_count, -left);  in xtensa_perf_event_set_period()
   243  int idx = hwc->idx;  in xtensa_pmu_start()
   253  hwc->state = 0;  in xtensa_pmu_start()
   261  int idx = hwc->idx;  in xtensa_pmu_stop()
   267  hwc->state |= PERF_HES_STOPPED;  in xtensa_pmu_stop()
   285  int idx = hwc->idx;  in xtensa_pmu_add()
   294  hwc->idx = idx;  in xtensa_pmu_add()
  [all …]
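Line 181 is the classic up-counting PMU period trick: to get an overflow interrupt after `left` more events, program the counter with -left and remember that starting value in prev_count so the next read can compute the delta. A sketch with stand-in types:

```c
#include <stdint.h>

struct hw_event {                  /* stand-in for struct hw_perf_event */
	int64_t prev_count;        /* kernel: local64_t */
};

/* Arm an up-counting counter to overflow after `left` events. */
static uint32_t arm_counter(struct hw_event *hwc, int64_t left)
{
	hwc->prev_count = -left;   /* cf. local64_set(&hwc->prev_count, -left) */
	return (uint32_t)(-left);  /* value actually written to the counter */
}
```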
|
| /linux/arch/riscv/kernel/ |
| A D | perf_event.c |
   216  struct hw_perf_event *hwc = &event->hw;  in riscv_pmu_read() local
   219  int idx = hwc->idx;  in riscv_pmu_read()
   256  hwc->state |= PERF_HES_STOPPED;  in riscv_pmu_stop()
   260  hwc->state |= PERF_HES_UPTODATE;  in riscv_pmu_stop()
   283  hwc->state = 0;  in riscv_pmu_start()
   291  local64_set(&hwc->prev_count, read_counter(hwc->idx));  in riscv_pmu_start()
   313  hwc->idx = hwc->config;  in riscv_pmu_add()
   314  cpuc->events[hwc->idx] = event;  in riscv_pmu_add()
   333  cpuc->events[hwc->idx] = NULL;  in riscv_pmu_del()
   427  hwc->config = code;  in riscv_event_init()
  [all …]
|
| /linux/drivers/perf/ |
| A D | thunderx2_pmu.c |
   379  local64_set(&hwc->prev_count, 0);  in uncore_start_event_l3c()
   380  reg_writel(0, hwc->event_base);  in uncore_start_event_l3c()
   409  local64_set(&hwc->prev_count, 0);  in uncore_start_event_dmc()
   410  reg_writel(0, hwc->event_base);  in uncore_start_event_dmc()
   601  hwc->config = event->attr.config;  in tx2_uncore_event_init()
   615  hwc->state = 0;  in tx2_uncore_event_start()
   645  hwc->state |= PERF_HES_STOPPED;  in tx2_uncore_event_stop()
   648  hwc->state |= PERF_HES_UPTODATE;  in tx2_uncore_event_stop()
   661  if (hwc->idx < 0)  in tx2_uncore_event_add()
   688  tx2_pmu->events[hwc->idx] = NULL;  in tx2_uncore_event_del()
  [all …]
|
| A D | arm_pmu.c |
   209  hwc->last_period = period;  in armpmu_event_set_period()
   296  hwc->state = 0;  in armpmu_start()
   314  int idx = hwc->idx;  in armpmu_del()
   321  hwc->idx = -1;  in armpmu_del()
   443  hwc->flags = 0;  in __hw_perf_event_init()
   458  hwc->idx = -1;  in __hw_perf_event_init()
   459  hwc->config_base = 0;  in __hw_perf_event_init()
   460  hwc->config = 0;  in __hw_perf_event_init()
   461  hwc->event_base = 0;  in __hw_perf_event_init()
   486  hwc->last_period = hwc->sample_period;  in __hw_perf_event_init()
  [all …]
|
| A D | qcom_l2_pmu.c |
   301  u32 idx = hwc->idx;  in l2_cache_event_update()
   322  u32 idx = hwc->idx;  in l2_cache_cluster_set_period()
   378  int idx = hwc->idx;  in l2_cache_clear_event_idx()
   407  hwc = &event->hw;  in l2_cache_handle_irq()
   450  if (hwc->sample_period) {  in l2_cache_event_init()
   529  hwc->idx = -1;  in l2_cache_event_init()
   545  int idx = hwc->idx;  in l2_cache_event_start()
   549  hwc->state = 0;  in l2_cache_event_start()
   575  int idx = hwc->idx;  in l2_cache_event_stop()
   601  hwc->idx = idx;  in l2_cache_event_add()
  [all …]
|
| /linux/arch/nds32/kernel/ |
| A D | perf_event_cpu.c |
   265  hwc = &event->hw;  in nds32_pmu_handle_irq()
   419  int idx = hwc->idx;  in nds32_pmu_enable_event()
   470  int idx = hwc->idx;  in nds32_pmu_disable_event()
   499  int idx = hwc->idx;  in nds32_pmu_read_counter()
   528  int idx = hwc->idx;  in nds32_pmu_write_counter()
   816  hwc->idx = -1;  in __hw_perf_event_init()
   818  hwc->config = 0;  in __hw_perf_event_init()
   845  hwc->last_period = hwc->sample_period;  in __hw_perf_event_init()
   846  local64_set(&hwc->period_left, hwc->sample_period);  in __hw_perf_event_init()
   899  hwc->state = 0;  in nds32_start()
  [all …]
|
| /linux/arch/x86/events/intel/ |
| A D | uncore_nhmex.c |
   247  struct hw_perf_event *hwc = &event->hw;  in nhmex_uncore_msr_enable_event() local
   249  if (hwc->idx == UNCORE_PMC_IDX_FIXED)  in nhmex_uncore_msr_enable_event()
   252  wrmsrl(hwc->config_base, hwc->config | NHMEX_PMON_CTL_EN_BIT22);  in nhmex_uncore_msr_enable_event()
   254  wrmsrl(hwc->config_base, hwc->config | NHMEX_PMON_CTL_EN_BIT0);  in nhmex_uncore_msr_enable_event()
   352  struct hw_perf_event *hwc = &event->hw;  in nhmex_bbox_hw_config() local
   379  struct hw_perf_event *hwc = &event->hw;  in nhmex_bbox_msr_enable_event() local
   474  wrmsrl(hwc->config_base, hwc->config | NHMEX_PMON_CTL_EN_BIT22);  in nhmex_sbox_msr_enable_event()
   862  wrmsrl(hwc->config_base, hwc->config | NHMEX_PMON_CTL_EN_BIT0);  in nhmex_mbox_msr_enable_event()
  1028  er->config = (hwc->config >> 32);  in nhmex_rbox_get_constraint()
  1135  hwc->config >> 32);  in nhmex_rbox_msr_enable_event()
  [all …]
|
| A D | p4.c |
   861  rdmsrl(hwc->config_base, v);  in p4_pmu_clear_cccr_ovf()
   874  rdmsrl(hwc->event_base, v);  in p4_pmu_clear_cccr_ovf()
   967  WARN_ON_ONCE(hwc->idx == 1);  in __p4_pmu_enable_event()
  1014  struct hw_perf_event *hwc;  in p4_pmu_handle_irq() local
  1031  hwc = &event->hw;  in p4_pmu_handle_irq()
  1108  hwc->config |= P4_CONFIG_HT;  in p4_pmu_swap_config_ts()
  1222  struct hw_perf_event *hwc;  in p4_pmu_schedule_events() local
  1252  if (hwc->idx != -1 && !p4_should_swap_ts(hwc->config, cpu)) {  in p4_pmu_schedule_events()
  1253  cntr_idx = hwc->idx;  in p4_pmu_schedule_events()
  1255  assign[i] = hwc->idx;  in p4_pmu_schedule_events()
  [all …]
|
| /linux/arch/arm/kernel/ |
| A D | perf_event_xscale.c |
   175  struct hw_perf_event *hwc;  in xscale1pmu_handle_irq() local
   183  hwc = &event->hw;  in xscale1pmu_handle_irq()
   210  int idx = hwc->idx;  in xscale1pmu_enable_event()
   246  int idx = hwc->idx;  in xscale1pmu_disable_event()
   328  int counter = hwc->idx;  in xscale1pmu_read_counter()
   349  int counter = hwc->idx;  in xscale1pmu_write_counter()
   529  hwc = &event->hw;  in xscale2pmu_handle_irq()
   556  int idx = hwc->idx;  in xscale2pmu_enable_event()
   602  int idx = hwc->idx;  in xscale2pmu_disable_event()
   691  int counter = hwc->idx;  in xscale2pmu_read_counter()
  [all …]
|
| /linux/arch/s390/kernel/ |
| A D | perf_cpum_sf.c |
   356  num = min_t(unsigned long, num, sfb_max_limit(hwc) - SFB_ALLOC_REG(hwc));  in sfb_account_allocs()
   363  SFB_ALLOC_REG(hwc) = 0;  in sfb_init_allocs()
   426  SAMPL_RATE(hwc), freq, n_sdb, sfb_max_limit(hwc),  in allocate_buffers()
   467  if (!OVERFLOW_REG(hwc))  in sfb_account_overflows()
   487  OVERFLOW_REG(hwc) = 0;  in sfb_account_overflows()
   616  hwc->last_period = hwc->sample_period;  in hw_init_period()
   617  local64_set(&hwc->period_left, hwc->sample_period);  in hw_init_period()
   757  hw_init_period(hwc, SAMPL_RATE(hwc));  in __hw_perf_event_init_rate()
   847  OVERFLOW_REG(hwc) = 0;  in __hw_perf_event_init()
  1321  OVERFLOW_REG(hwc) = DIV_ROUND_UP(OVERFLOW_REG(hwc) +  in hw_perf_event_update()
  [all …]
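The DIV_ROUND_UP at line 1321 (its operands are truncated above) is the kernel's ceiling-division helper; for reference, its standard definition is:

```c
/* Ceiling division for positive integers: rounds n/d up. */
#define DIV_ROUND_UP(n, d) (((n) + (d) - 1) / (d))
```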
|
| /linux/arch/sh/kernel/ |
| A D | perf_event.c |
   103  struct hw_perf_event *hwc = &event->hw;  in __hw_perf_event_init() local
   153  hwc->config |= config;  in __hw_perf_event_init()
   202  struct hw_perf_event *hwc = &event->hw;  in sh_pmu_stop() local
   203  int idx = hwc->idx;  in sh_pmu_stop()
   206  sh_pmu->disable(hwc, idx);  in sh_pmu_stop()
   220  struct hw_perf_event *hwc = &event->hw;  in sh_pmu_start() local
   221  int idx = hwc->idx;  in sh_pmu_start()
   231  sh_pmu->enable(hwc, idx);  in sh_pmu_start()
   248  int idx = hwc->idx;  in sh_pmu_add()
   259  hwc->idx = idx;  in sh_pmu_add()
  [all …]
|
| /linux/arch/x86/events/ |
| A D | core.c |
   389  hwc->config |= val;  in set_ext_hw_attr()
   481  hwc->last_period = hwc->sample_period;  in x86_setup_perfctr()
   482  local64_set(&hwc->period_left, hwc->sample_period);  in x86_setup_perfctr()
  1029  if (hwc->idx == -1)  in x86_schedule_events()
  1222  switch (hwc->idx) {  in x86_assign_hw_event()
  1242  hwc->config_base = x86_pmu_config_addr(hwc->idx);  in x86_assign_hw_event()
  1243  hwc->event_base = x86_pmu_event_addr(hwc->idx);  in x86_assign_hw_event()
  1244  hwc->event_base_rdpmc = x86_pmu_rdpmc_index(hwc->idx);  in x86_assign_hw_event()
  1304  hwc = &event->hw;  in x86_pmu_enable()
  1331  hwc = &event->hw;  in x86_pmu_enable()
  [all …]
|
| /linux/arch/mips/kernel/ |
| A D | perf_event_mipsxx.c |
   477  hwc->state = 0;  in mipspmu_start()
   480  mipspmu_event_set_period(event, hwc, hwc->idx);  in mipspmu_start()
   483  mipsxx_pmu_enable_event(hwc, hwc->idx);  in mipspmu_start()
   494  mipspmu_event_update(event, hwc, hwc->idx);  in mipspmu_stop()
   539  int idx = hwc->idx;  in mipspmu_del()
   555  if (hwc->idx < 0)  in mipspmu_read()
   558  mipspmu_event_update(event, hwc, hwc->idx);  in mipspmu_read()
  1525  hwc->idx = -1;  in __hw_perf_event_init()
  1526  hwc->config = 0;  in __hw_perf_event_init()
  1530  hwc->last_period = hwc->sample_period;  in __hw_perf_event_init()
  [all …]
|