/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_nbio.c
    35  if (!adev->nbio.ras_if) {  in amdgpu_nbio_ras_late_init()
    37  if (!adev->nbio.ras_if)  in amdgpu_nbio_ras_late_init()
    39  adev->nbio.ras_if->block = AMDGPU_RAS_BLOCK__PCIE_BIF;  in amdgpu_nbio_ras_late_init()
    41  adev->nbio.ras_if->sub_block_index = 0;  in amdgpu_nbio_ras_late_init()
    43  ih_info.head = fs_info.head = *adev->nbio.ras_if;  in amdgpu_nbio_ras_late_init()
    44  r = amdgpu_ras_late_init(adev, adev->nbio.ras_if,  in amdgpu_nbio_ras_late_init()
    63  amdgpu_ras_late_fini(adev, adev->nbio.ras_if, &ih_info);  in amdgpu_nbio_ras_late_init()
    65  kfree(adev->nbio.ras_if);  in amdgpu_nbio_ras_late_init()
    66  adev->nbio.ras_if = NULL;  in amdgpu_nbio_ras_late_init()
    73  adev->nbio.ras_if) {  in amdgpu_nbio_ras_fini()
    [all …]
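The amdgpu_nbio.c hits above trace a lazy init/teardown pattern: adev->nbio.ras_if is allocated on first late init, tagged as the PCIE_BIF RAS block, registered through amdgpu_ras_late_init(), and on the failure path (and again in amdgpu_nbio_ras_fini()) it is unregistered, freed, and reset to NULL. Below is a minimal user-space sketch of that pattern, assuming simplified stand-in types; ras_common_if, device_ctx and ras_late_init() here are illustrative placeholders, not the real amdgpu definitions.

```c
#include <stdio.h>
#include <stdlib.h>

/* Hypothetical stand-ins for the kernel structures referenced above. */
struct ras_common_if {
	int block;              /* which RAS block this interface describes */
	int sub_block_index;
};

struct device_ctx {
	struct ras_common_if *ras_if;   /* NULL until late init allocates it */
};

#define RAS_BLOCK_PCIE_BIF 7            /* illustrative value only */

/* Placeholder for the registration step; returns 0 on success. */
static int ras_late_init(struct device_ctx *dev, struct ras_common_if *ras_if)
{
	(void)dev; (void)ras_if;
	return 0;
}

static int nbio_ras_late_init(struct device_ctx *dev)
{
	int r;

	if (!dev->ras_if) {
		dev->ras_if = calloc(1, sizeof(*dev->ras_if));
		if (!dev->ras_if)
			return -1;                       /* would be -ENOMEM */
		dev->ras_if->block = RAS_BLOCK_PCIE_BIF;
		dev->ras_if->sub_block_index = 0;
	}

	r = ras_late_init(dev, dev->ras_if);
	if (r) {
		/* On failure, free and reset the pointer so a later retry
		 * (or the fini path) starts from a clean state. */
		free(dev->ras_if);
		dev->ras_if = NULL;
	}
	return r;
}

int main(void)
{
	struct device_ctx dev = { 0 };

	printf("late init: %d, ras_if: %p\n",
	       nbio_ras_late_init(&dev), (void *)dev.ras_if);
	free(dev.ras_if);
	return 0;
}
```

Resetting the pointer to NULL after the kfree() is what keeps the `if (!adev->nbio.ras_if)` guard at line 35 safe to re-enter.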
soc15.c
    710  (adev->nbio.funcs->program_aspm))  in soc15_program_aspm()
    711  adev->nbio.funcs->program_aspm(adev);  in soc15_program_aspm()
    1227  if (adev->nbio.ras_funcs &&  in soc15_common_late_init()
    1228  adev->nbio.ras_funcs->ras_late_init)  in soc15_common_late_init()
    1250  if (adev->nbio.ras_funcs &&  in soc15_common_sw_fini()
    1251  adev->nbio.ras_funcs->ras_fini)  in soc15_common_sw_fini()
    1252  adev->nbio.ras_funcs->ras_fini(adev);  in soc15_common_sw_fini()
    1285  adev->nbio.funcs->init_registers(adev);  in soc15_common_hw_init()
    1314  if (adev->nbio.ras_if &&  in soc15_common_hw_fini()
    1316  if (adev->nbio.ras_funcs &&  in soc15_common_hw_fini()
    [all …]
nv.c
    237  data = adev->nbio.funcs->get_pcie_data_offset(adev);  in nv_pcie_rreg()
    331  return adev->nbio.funcs->get_memsize(adev);  in nv_get_config_memsize()
    591  (adev->nbio.funcs->program_aspm))  in nv_program_aspm()
    592  adev->nbio.funcs->program_aspm(adev);  in nv_program_aspm()
    619  return adev->nbio.funcs->get_rev_id(adev);  in nv_get_rev_id()
    702  (adev->nbio.funcs->enable_aspm))  in nv_update_umd_stable_pstate()
    703  adev->nbio.funcs->enable_aspm(adev, !enter);  in nv_update_umd_stable_pstate()
    1022  if (adev->nbio.funcs->apply_lc_spc_mode_wa)  in nv_common_hw_init()
    1023  adev->nbio.funcs->apply_lc_spc_mode_wa(adev);  in nv_common_hw_init()
    1033  adev->nbio.funcs->init_registers(adev);  in nv_common_hw_init()
    [all …]
df_v3_6.c
    51  address = adev->nbio.funcs->get_pcie_index_offset(adev);  in df_v3_6_get_fica()
    52  data = adev->nbio.funcs->get_pcie_data_offset(adev);  in df_v3_6_get_fica()
    74  address = adev->nbio.funcs->get_pcie_index_offset(adev);  in df_v3_6_set_fica()
    75  data = adev->nbio.funcs->get_pcie_data_offset(adev);  in df_v3_6_set_fica()
    102  address = adev->nbio.funcs->get_pcie_index_offset(adev);  in df_v3_6_perfmon_rreg()
    103  data = adev->nbio.funcs->get_pcie_data_offset(adev);  in df_v3_6_perfmon_rreg()
    124  address = adev->nbio.funcs->get_pcie_index_offset(adev);  in df_v3_6_perfmon_wreg()
    125  data = adev->nbio.funcs->get_pcie_data_offset(adev);  in df_v3_6_perfmon_wreg()
    143  address = adev->nbio.funcs->get_pcie_index_offset(adev);  in df_v3_6_perfmon_arm_with_status()
    144  data = adev->nbio.funcs->get_pcie_data_offset(adev);  in df_v3_6_perfmon_arm_with_status()
nbio_v7_4.c
    373  struct ras_manager *obj = amdgpu_ras_find_obj(adev, adev->nbio.ras_if);  in nbio_v7_4_handle_ras_controller_intr_no_bifring()
    410  get_ras_block_str(adev->nbio.ras_if));  in nbio_v7_4_handle_ras_controller_intr_no_bifring()
    416  get_ras_block_str(adev->nbio.ras_if));  in nbio_v7_4_handle_ras_controller_intr_no_bifring()
    559  adev->nbio.ras_controller_irq.funcs =  in nbio_v7_4_init_ras_controller_interrupt()
    561  adev->nbio.ras_controller_irq.num_types = 1;  in nbio_v7_4_init_ras_controller_interrupt()
    566  &adev->nbio.ras_controller_irq);  in nbio_v7_4_init_ras_controller_interrupt()
    577  adev->nbio.ras_err_event_athub_irq.funcs =  in nbio_v7_4_init_ras_err_event_athub_interrupt()
    579  adev->nbio.ras_err_event_athub_irq.num_types = 1;  in nbio_v7_4_init_ras_err_event_athub_interrupt()
    584  &adev->nbio.ras_err_event_athub_irq);  in nbio_v7_4_init_ras_err_event_athub_interrupt()
amdgpu_discovery.c
    1207  adev->nbio.funcs = &nbio_v6_1_funcs;  in amdgpu_discovery_set_ip_blocks()
    1208  adev->nbio.hdp_flush_reg = &nbio_v6_1_hdp_flush_reg;  in amdgpu_discovery_set_ip_blocks()
    1213  adev->nbio.funcs = &nbio_v7_0_funcs;  in amdgpu_discovery_set_ip_blocks()
    1214  adev->nbio.hdp_flush_reg = &nbio_v7_0_hdp_flush_reg;  in amdgpu_discovery_set_ip_blocks()
    1218  adev->nbio.funcs = &nbio_v7_4_funcs;  in amdgpu_discovery_set_ip_blocks()
    1219  adev->nbio.hdp_flush_reg = &nbio_v7_4_hdp_flush_reg;  in amdgpu_discovery_set_ip_blocks()
    1222  adev->nbio.funcs = &nbio_v7_4_funcs;  in amdgpu_discovery_set_ip_blocks()
    1228  adev->nbio.funcs = &nbio_v7_2_funcs;  in amdgpu_discovery_set_ip_blocks()
    1229  adev->nbio.hdp_flush_reg = &nbio_v7_2_hdp_flush_reg;  in amdgpu_discovery_set_ip_blocks()
    1235  adev->nbio.funcs = &nbio_v2_3_funcs;  in amdgpu_discovery_set_ip_blocks()
    [all …]
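amdgpu_discovery.c is where the per-revision NBIO callback table is chosen: depending on the detected IP version, one of the nbio_v*_funcs / nbio_v*_hdp_flush_reg tables is stored in adev->nbio, and the rest of the driver only ever calls through those pointers. Below is a hedged sketch of that table-selection idea, with made-up version numbers and a single callback standing in for the full ops structure.

```c
#include <stdio.h>

/* Illustrative ops table; the real struct carries many more callbacks. */
struct nbio_funcs {
	const char *name;
	unsigned int (*get_memsize)(void);
};

static unsigned int v6_memsize(void) { return 4096; }
static unsigned int v7_memsize(void) { return 8192; }

static const struct nbio_funcs nbio_v6_1_funcs = { "nbio v6.1", v6_memsize };
static const struct nbio_funcs nbio_v7_4_funcs = { "nbio v7.4", v7_memsize };

struct device_ctx {
	int nbio_major;                    /* discovered IP version (illustrative) */
	const struct nbio_funcs *nbio;     /* selected callback table */
};

/* Pick the table once, during IP-block discovery. */
static int set_ip_blocks(struct device_ctx *dev)
{
	switch (dev->nbio_major) {
	case 6:
		dev->nbio = &nbio_v6_1_funcs;
		break;
	case 7:
		dev->nbio = &nbio_v7_4_funcs;
		break;
	default:
		return -1;                     /* unknown revision */
	}
	return 0;
}

int main(void)
{
	struct device_ctx dev = { .nbio_major = 7 };

	if (!set_ip_blocks(&dev))
		printf("%s reports %u MB\n", dev.nbio->name, dev.nbio->get_memsize());
	return 0;
}
```

This is why files such as nv.c, soc15.c and the gmc/sdma code in this listing never test the ASIC revision again; they simply call through adev->nbio.funcs.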
amdgpu_irq.c
    202  if (adev->nbio.ras_funcs &&  in amdgpu_irq_handler()
    203  adev->nbio.ras_funcs->handle_ras_controller_intr_no_bifring)  in amdgpu_irq_handler()
    204  adev->nbio.ras_funcs->handle_ras_controller_intr_no_bifring(adev);  in amdgpu_irq_handler()
    206  if (adev->nbio.ras_funcs &&  in amdgpu_irq_handler()
    207  adev->nbio.ras_funcs->handle_ras_err_event_athub_intr_no_bifring)  in amdgpu_irq_handler()
    208  adev->nbio.ras_funcs->handle_ras_err_event_athub_intr_no_bifring(adev);  in amdgpu_irq_handler()
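The amdgpu_irq.c hits show the defensive shape used everywhere for ras_funcs: both the table pointer and the individual callback are checked before the call, because not every NBIO revision registers RAS handlers. A minimal sketch of that optional-callback dispatch follows, with placeholder names.

```c
#include <stdio.h>

struct ras_funcs {
	void (*handle_ras_controller_intr)(void *dev);
	void (*handle_ras_err_event_athub_intr)(void *dev);
};

struct device_ctx {
	const struct ras_funcs *ras_funcs;   /* NULL on parts without RAS support */
};

static void irq_handler(struct device_ctx *dev)
{
	/* Both the table and each callback inside it are optional. */
	if (dev->ras_funcs && dev->ras_funcs->handle_ras_controller_intr)
		dev->ras_funcs->handle_ras_controller_intr(dev);

	if (dev->ras_funcs && dev->ras_funcs->handle_ras_err_event_athub_intr)
		dev->ras_funcs->handle_ras_err_event_athub_intr(dev);
}

static void controller_intr(void *dev)
{
	(void)dev;
	printf("RAS controller interrupt handled\n");
}

int main(void)
{
	const struct ras_funcs funcs = { controller_intr, NULL };
	struct device_ctx with_ras = { &funcs };
	struct device_ctx without_ras = { NULL };

	irq_handler(&with_ras);      /* calls the one callback that exists */
	irq_handler(&without_ras);   /* safely does nothing */
	return 0;
}
```

The same two-step guard appears in the soc15.c, amdgpu_ras.c and amdgpu_device.c matches in this listing.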
amdgpu_ras.c
    944  if (adev->nbio.ras_funcs &&  in amdgpu_ras_query_error_status()
    945  adev->nbio.ras_funcs->query_ras_error_count)  in amdgpu_ras_query_error_status()
    946  adev->nbio.ras_funcs->query_ras_error_count(adev, &err_data);  in amdgpu_ras_query_error_status()
    2317  adev->nbio.ras_funcs = &nbio_v7_4_ras_funcs;  in amdgpu_ras_init()
    2324  if (adev->nbio.ras_funcs &&  in amdgpu_ras_init()
    2325  adev->nbio.ras_funcs->init_ras_controller_interrupt) {  in amdgpu_ras_init()
    2326  r = adev->nbio.ras_funcs->init_ras_controller_interrupt(adev);  in amdgpu_ras_init()
    2331  if (adev->nbio.ras_funcs &&  in amdgpu_ras_init()
    2332  adev->nbio.ras_funcs->init_ras_err_event_athub_interrupt) {  in amdgpu_ras_init()
    2333  r = adev->nbio.ras_funcs->init_ras_err_event_athub_interrupt(adev);  in amdgpu_ras_init()
navi10_ih.c
    329  adev->nbio.funcs->ih_control(adev);  in navi10_ih_irq_init()
    361  adev->nbio.funcs->ih_doorbell_range(adev, ih[0]->use_doorbell,  in navi10_ih_irq_init()
sdma_v5_2.c
    405  const struct nbio_hdp_flush_reg *nbio_hf_reg = adev->nbio.hdp_flush_reg;  in sdma_v5_2_ring_emit_hdp_flush()
    412  amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_done_offset(adev)) << 2);  in sdma_v5_2_ring_emit_hdp_flush()
    413  amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_req_offset(adev)) << 2);  in sdma_v5_2_ring_emit_hdp_flush()
    680  adev->nbio.funcs->sdma_doorbell_range(adev, i, ring->use_doorbell,  in sdma_v5_2_gfx_resume()
sdma_v5_0.c
    517  const struct nbio_hdp_flush_reg *nbio_hf_reg = adev->nbio.hdp_flush_reg;  in sdma_v5_0_ring_emit_hdp_flush()
    527  amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_done_offset(adev)) << 2);  in sdma_v5_0_ring_emit_hdp_flush()
    528  amdgpu_ring_write(ring, (adev->nbio.funcs->get_hdp_flush_req_offset(adev)) << 2);  in sdma_v5_0_ring_emit_hdp_flush()
    805  adev->nbio.funcs->sdma_doorbell_range(adev, i, ring->use_doorbell,  in sdma_v5_0_gfx_resume()
jpeg_v3_0.c
    143  adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell,  in jpeg_v3_0_hw_init()
amdgpu_device.c
    4456  u32 memsize = adev->nbio.funcs->get_memsize(adev);  in amdgpu_device_mode1_reset()
    5338  adev->nbio.funcs->enable_doorbell_interrupt)  in amdgpu_device_baco_enter()
    5339  adev->nbio.funcs->enable_doorbell_interrupt(adev, false);  in amdgpu_device_baco_enter()
    5358  adev->nbio.funcs->enable_doorbell_interrupt)  in amdgpu_device_baco_exit()
    5359  adev->nbio.funcs->enable_doorbell_interrupt(adev, true);  in amdgpu_device_baco_exit()
    5362  adev->nbio.funcs->clear_doorbell_interrupt)  in amdgpu_device_baco_exit()
    5363  adev->nbio.funcs->clear_doorbell_interrupt(adev);  in amdgpu_device_baco_exit()
vega10_ih.c
    273  adev->nbio.funcs->ih_control(adev);  in vega10_ih_irq_init()
jpeg_v2_5.c
    167  adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell,  in jpeg_v2_5_hw_init()
vega20_ih.c
    309  adev->nbio.funcs->ih_control(adev);  in vega20_ih_irq_init()
jpeg_v2_0.c
    134  adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell,  in jpeg_v2_0_hw_init()
sdma_v4_0.c
    931  const struct nbio_hdp_flush_reg *nbio_hf_reg = adev->nbio.hdp_flush_reg;  in sdma_v4_0_ring_emit_hdp_flush()
    936  adev->nbio.funcs->get_hdp_flush_done_offset(adev),  in sdma_v4_0_ring_emit_hdp_flush()
    937  adev->nbio.funcs->get_hdp_flush_req_offset(adev),  in sdma_v4_0_ring_emit_hdp_flush()
gmc_v10_0.c
    779  adev->nbio.funcs->get_memsize(adev) * 1024ULL * 1024ULL;  in gmc_v10_0_mc_init()
amdgpu.h
    964  struct amdgpu_nbio nbio;  member
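The single amdgpu.h hit is the anchor for everything else in this listing: struct amdgpu_device embeds one struct amdgpu_nbio nbio, and every other match reads a field of it. The sketch below is a hypothetical reconstruction of how the fields seen above might hang together; only the field names come from the matches, the types are stubs, and the real definitions live in the amdgpu headers.

```c
#include <stdio.h>

/* Stub / opaque types; illustrative only. */
struct amdgpu_nbio_funcs;          /* per-revision register callbacks             */
struct amdgpu_nbio_ras_funcs;      /* optional RAS callbacks                      */
struct nbio_hdp_flush_reg;         /* HDP flush request/done register description */
struct ras_common_if;              /* RAS block descriptor (the ras_if above)     */
struct amdgpu_irq_src { int num_types; const void *funcs; };   /* simplified stub */

struct amdgpu_nbio {
	const struct amdgpu_nbio_funcs     *funcs;
	const struct amdgpu_nbio_ras_funcs *ras_funcs;
	struct ras_common_if               *ras_if;
	const struct nbio_hdp_flush_reg    *hdp_flush_reg;
	struct amdgpu_irq_src               ras_controller_irq;
	struct amdgpu_irq_src               ras_err_event_athub_irq;
};

/* Stand-in for the real struct amdgpu_device, which has far more members. */
struct amdgpu_device_sketch {
	struct amdgpu_nbio nbio;            /* the member matched at amdgpu.h:964 */
};

int main(void)
{
	printf("sketch of struct amdgpu_nbio: %zu bytes\n",
	       sizeof(struct amdgpu_nbio));
	return 0;
}
```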
gmc_v9_0.c
    1356  adev->nbio.funcs->get_memsize(adev) * 1024ULL * 1024ULL;  in gmc_v9_0_mc_init()
vcn_v2_0.c
    222  adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell,  in vcn_v2_0_hw_init()
vcn_v2_5.c
    280  adev->nbio.funcs->vcn_doorbell_range(adev, ring->use_doorbell,  in vcn_v2_5_hw_init()
gfx_v9_0.c
    5351  const struct nbio_hdp_flush_reg *nbio_hf_reg = adev->nbio.hdp_flush_reg;  in gfx_v9_0_ring_emit_hdp_flush()
    5371  adev->nbio.funcs->get_hdp_flush_req_offset(adev),  in gfx_v9_0_ring_emit_hdp_flush()
    5372  adev->nbio.funcs->get_hdp_flush_done_offset(adev),  in gfx_v9_0_ring_emit_hdp_flush()
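The sdma_v4/v5 and gfx_v9 hits all emit an HDP flush the same way: the NBIO layer supplies a "request" and a "done" register offset (plus a per-engine reference mask from hdp_flush_reg), the engine writes the mask to the request register and then polls the done register for it. In the real driver that write/poll is encoded as ring packets executed by the engine; the sketch below models only the request-then-poll idea against a fake register file, with made-up offsets.

```c
#include <stdint.h>
#include <stdio.h>

static uint32_t regs[16];                 /* fake MMIO register file */

/* Hypothetical per-revision offsets, analogous to
 * get_hdp_flush_req_offset() / get_hdp_flush_done_offset(). */
static uint32_t hdp_flush_req_offset(void)  { return 4; }
static uint32_t hdp_flush_done_offset(void) { return 5; }

static void     reg_write(uint32_t off, uint32_t val) { regs[off] = val; }
static uint32_t reg_read(uint32_t off)                { return regs[off]; }

/* Request a flush for one engine and wait for the acknowledgement bit. */
static void emit_hdp_flush(uint32_t ref_and_mask)
{
	reg_write(hdp_flush_req_offset(), ref_and_mask);

	/* The fake "hardware" acknowledges immediately by mirroring the bit;
	 * real hardware sets it once the flush has completed. */
	reg_write(hdp_flush_done_offset(), ref_and_mask);

	while ((reg_read(hdp_flush_done_offset()) & ref_and_mask) != ref_and_mask)
		;                                 /* poll until acknowledged */

	printf("HDP flush acked, mask 0x%x\n", (unsigned int)ref_and_mask);
}

int main(void)
{
	emit_hdp_flush(1u << 3);              /* e.g. one SDMA instance's bit */
	return 0;
}
```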
/linux/drivers/block/xen-blkback/
blkback.c
    1194  int i, nbio = 0;  in dispatch_rw_block_io() local
    1333  biolist[nbio++] = bio;  in dispatch_rw_block_io()
    1352  biolist[nbio++] = bio;  in dispatch_rw_block_io()
    1359  atomic_set(&pending_req->pendcnt, nbio);  in dispatch_rw_block_io()
    1362  for (i = 0; i < nbio; i++)  in dispatch_rw_block_io()
    1386  for (i = 0; i < nbio; i++)  in dispatch_rw_block_io()
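This last group is unrelated to AMD's NBIO block: in xen-blkback, nbio is just a local counter of the bios built for one ring request. The count is stored in the request's pending counter before any bio is submitted, so completions that race in early cannot see a stale count. Below is a simplified sketch of that count-then-submit pattern, using plain (non-atomic) placeholder types.

```c
#include <stdio.h>

#define MAX_BIOS 8

struct bio { int id; };

struct pending_req {
	int pendcnt;     /* bios that must complete before the request is done */
};

static void submit(struct bio *b)
{
	printf("submit bio %d\n", b->id);
}

static void dispatch_rw_block_io(struct pending_req *req, int nsegs)
{
	struct bio bios[MAX_BIOS];
	struct bio *biolist[MAX_BIOS];
	int i, nbio = 0;

	/* Build one bio per segment (grossly simplified). */
	for (i = 0; i < nsegs && nbio < MAX_BIOS; i++) {
		bios[nbio].id = i;
		biolist[nbio] = &bios[nbio];
		nbio++;
	}

	/* Record the count first (atomic_set() in the real code), then
	 * submit: a bio may complete as soon as it is submitted. */
	req->pendcnt = nbio;
	for (i = 0; i < nbio; i++)
		submit(biolist[i]);
}

int main(void)
{
	struct pending_req req = { 0 };

	dispatch_rw_block_io(&req, 3);
	printf("pendcnt = %d\n", req.pendcnt);
	return 0;
}
```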