/linux/drivers/gpu/drm/i915/gvt/: matches for the identifier "vgpu", grouped by file.
vgpu.c
    46:  vgpu_vreg_t(vgpu, vgtif_reg(vgt_id)) = vgpu->id;  in populate_pvinfo_page()
    68:  vgpu_aperture_gmadr_base(vgpu), vgpu_aperture_sz(vgpu));  in populate_pvinfo_page()
    70:  vgpu_hidden_gmadr_base(vgpu), vgpu_hidden_sz(vgpu));  in populate_pvinfo_page()
    306: vfree(vgpu);  in intel_gvt_destroy_vgpu()
    326: vgpu = vzalloc(sizeof(*vgpu));  in intel_gvt_create_idle_vgpu()
    327: if (!vgpu)  in intel_gvt_create_idle_vgpu()
    343: return vgpu;  in intel_gvt_create_idle_vgpu()
    346: vfree(vgpu);  in intel_gvt_create_idle_vgpu()
    363: vfree(vgpu);  in intel_gvt_destroy_idle_vgpu()
    377: vgpu = vzalloc(sizeof(*vgpu));  in __intel_gvt_create_vgpu()
    [all …]
|
display.c
    81:  if (edp_pipe_is_enabled(vgpu) &&  in pipe_is_enabled()
    82:  get_edp_pipe(vgpu) == pipe)  in pipe_is_enabled()
    377: vgpu_vreg_t(vgpu, DPLL_CTRL1) =  in emulate_monitor_status_change()
    379: vgpu_vreg_t(vgpu, DPLL_CTRL1) |=  in emulate_monitor_status_change()
    381: vgpu_vreg_t(vgpu, LCPLL1_CTL) =  in emulate_monitor_status_change()
    398: vgpu_vreg_t(vgpu, DPLL_CTRL2) &=  in emulate_monitor_status_change()
    522: struct intel_vgpu *vgpu;  in vblank_timer_fn()
    591: intel_vgpu_port(vgpu, vgpu->display.port_num);  in vgpu_update_vblank_emulation()
    651: mutex_lock(&vgpu->vgpu_lock);  in intel_vgpu_emulate_vblank()
    654: mutex_unlock(&vgpu->vgpu_lock);  in intel_vgpu_emulate_vblank()
    [all …]
|
cfg_space.c
    96:  if (off == vgpu->cfg_space.pmcsr_off && vgpu->cfg_space.pmcsr_off) {  in vgpu_pci_cfg_mem_write()
    100: vgpu->d3_entered = true;  in vgpu_pci_cfg_mem_write()
    102: vgpu->id, pwr);  in vgpu_pci_cfg_mem_write()
    201: ret = trap_gttmmio(vgpu, false);  in emulate_pci_command_write()
    208: ret = trap_gttmmio(vgpu, true);  in emulate_pci_command_write()
    211: ret = map_aperture(vgpu, true);  in emulate_pci_command_write()
    283: trap_gttmmio(vgpu, false);  in emulate_pci_bar_write()
    289: map_aperture(vgpu, false);  in emulate_pci_bar_write()
    418: vgpu->cfg_space.pmcsr_off = 0;  in intel_vgpu_init_cfg_space()
    444: trap_gttmmio(vgpu, false);  in intel_vgpu_reset_cfg_space()
    [all …]
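The emulate_pci_command_write() hits suggest how the device model reacts to guest PCI_COMMAND writes: when the memory-space enable bit flips, GTT MMIO trapping and the aperture mapping are armed or disarmed together. A standalone sketch of that decode, under the assumption that only a change of the bit triggers action; trap_gttmmio() and map_aperture() are printf stubs standing in for the kernel functions of the same names:

```c
/* Model of the PCI_COMMAND handling suggested by the
 * emulate_pci_command_write() matches above. The change-detection
 * logic is an assumption; the stubs only print what they would do. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define PCI_COMMAND_MEMORY 0x2	/* same bit as in <linux/pci_regs.h> */

static int trap_gttmmio(bool enable)
{
	printf("GTT MMIO trap: %s\n", enable ? "on" : "off");
	return 0;
}

static int map_aperture(bool enable)
{
	printf("aperture map:  %s\n", enable ? "on" : "off");
	return 0;
}

static int emulate_pci_command_write(uint16_t old_cmd, uint16_t new_cmd)
{
	/* Act only when the memory-space enable bit actually changes. */
	if ((old_cmd ^ new_cmd) & PCI_COMMAND_MEMORY) {
		bool on = new_cmd & PCI_COMMAND_MEMORY;
		int ret = trap_gttmmio(on);

		if (ret)
			return ret;
		return map_aperture(on);
	}
	return 0;
}

int main(void)
{
	emulate_pci_command_write(0x0000, PCI_COMMAND_MEMORY); /* enable */
	emulate_pci_command_write(PCI_COMMAND_MEMORY, 0x0000); /* disable */
	return 0;
}
```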
|
gvt.h
    116: #define vgpu_cfg_space(vgpu) ((vgpu)->cfg_space.virtual_cfg_space)
    130: #define vgpu_opregion(vgpu) (&(vgpu->opregion))
    405: #define vgpu_aperture_sz(vgpu) ((vgpu)->gm.aperture_sz)
    406: #define vgpu_hidden_sz(vgpu) ((vgpu)->gm.hidden_sz)
    409: (gvt_aperture_pa_base(vgpu->gvt) + vgpu_aperture_offset(vgpu))
    411: #define vgpu_ggtt_gm_sz(vgpu) ((vgpu)->gm.aperture_sz + (vgpu)->gm.hidden_sz)
    414: (vgpu_aperture_pa_base(vgpu) + vgpu_aperture_sz(vgpu) - 1)
    418: (vgpu_aperture_gmadr_base(vgpu) + vgpu_aperture_sz(vgpu) - 1)
    422: (vgpu_hidden_gmadr_base(vgpu) + vgpu_hidden_sz(vgpu) - 1)
    424: #define vgpu_fence_base(vgpu) (vgpu->fence.base)
    [all …]
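Taken together, the gvt.h macros above encode the vGPU's graphics-memory layout: a mappable aperture plus a hidden (unmappable) range, each described by a base and a size, with the *_end addresses derived as base + size - 1. A standalone sketch that mirrors just that arithmetic; the struct is a simplified stand-in for the relevant fields of the kernel's struct intel_vgpu, and the example sizes are made up:

```c
/* Standalone rendering of the graphics-memory range arithmetic visible
 * in gvt.h. Only guest graphics addresses (gmadr) are modeled; the
 * host-physical (pa) macros follow the same base + offset pattern. */
#include <stdio.h>

struct vgpu_gm {
	unsigned long aperture_sz;	/* mappable (low) graphics memory */
	unsigned long hidden_sz;	/* unmappable (high) graphics memory */
	unsigned long aperture_base;	/* guest address of the aperture */
	unsigned long hidden_base;	/* guest address of hidden GM */
};

#define vgpu_aperture_sz(v)		((v)->aperture_sz)
#define vgpu_hidden_sz(v)		((v)->hidden_sz)
#define vgpu_ggtt_gm_sz(v)		((v)->aperture_sz + (v)->hidden_sz)
#define vgpu_aperture_gmadr_base(v)	((v)->aperture_base)
#define vgpu_aperture_gmadr_end(v) \
	(vgpu_aperture_gmadr_base(v) + vgpu_aperture_sz(v) - 1)
#define vgpu_hidden_gmadr_base(v)	((v)->hidden_base)
#define vgpu_hidden_gmadr_end(v) \
	(vgpu_hidden_gmadr_base(v) + vgpu_hidden_sz(v) - 1)

int main(void)
{
	/* Example split: 128 MiB aperture at 0, 384 MiB hidden GM above. */
	struct vgpu_gm gm = {
		.aperture_sz   = 128ul << 20,
		.hidden_sz     = 384ul << 20,
		.aperture_base = 0,
		.hidden_base   = 128ul << 20,
	};

	printf("aperture: [%#lx, %#lx]\n",
	       vgpu_aperture_gmadr_base(&gm), vgpu_aperture_gmadr_end(&gm));
	printf("hidden:   [%#lx, %#lx]\n",
	       vgpu_hidden_gmadr_base(&gm), vgpu_hidden_gmadr_end(&gm));
	printf("total GM: %lu MiB\n", vgpu_ggtt_gm_sz(&gm) >> 20);
	return 0;
}
```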
|
edid.c
    145: intel_vgpu_init_i2c_edid(vgpu);  in gmbus0_mmio_write()
    185: reset_gmbus_controller(vgpu);  in gmbus1_mmio_write()
    219: vgpu->id, slave_addr);  in gmbus1_mmio_write()
    239: intel_vgpu_init_i2c_edid(vgpu);  in gmbus1_mmio_write()
    271: vgpu_vreg(vgpu, offset) = wvalue;  in gmbus1_mmio_write()
    306: byte_data = edid_get_byte(vgpu);  in gmbus3_mmio_read()
    325: intel_vgpu_init_i2c_edid(vgpu);  in gmbus3_mmio_read()
    491: vgpu_vreg(vgpu, offset) = value;  in intel_gvt_i2c_handle_aux_ch_write()
    497: msg = vgpu_vreg(vgpu, offset + 4);  in intel_gvt_i2c_handle_aux_ch_write()
    508: vgpu_vreg(vgpu, offset) =  in intel_gvt_i2c_handle_aux_ch_write()
    [all …]
|
mmio.c
    67:  if (!vgpu || !p_data)  in failsafe_emulate_mmio_rw()
    70:  gvt = vgpu->gvt;  in failsafe_emulate_mmio_rw()
    71:  mutex_lock(&vgpu->vgpu_lock);  in failsafe_emulate_mmio_rw()
    89:  mutex_unlock(&vgpu->vgpu_lock);  in failsafe_emulate_mmio_rw()
    110: if (vgpu->failsafe) {  in intel_vgpu_emulate_mmio_read()
    114: mutex_lock(&vgpu->vgpu_lock);  in intel_vgpu_emulate_mmio_read()
    185: if (vgpu->failsafe) {  in intel_vgpu_emulate_mmio_write()
    190: mutex_lock(&vgpu->vgpu_lock);  in intel_vgpu_emulate_mmio_write()
    312: if (!vgpu->mmio.vreg)  in intel_vgpu_init_mmio()
    327: vfree(vgpu->mmio.vreg);  in intel_vgpu_clean_mmio()
    [all …]
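The mmio.c matches show two entry paths: a failsafe path taken when the vGPU has hit a fatal emulation error, and the normal one, both serialized on vgpu_lock. A userspace model of that split; the struct, the raw-vreg access standing in for per-register handler dispatch, and the buffer size are all simplified stand-ins:

```c
/* Model of the dispatch shape visible in mmio.c: both read paths take
 * the per-vGPU lock (see the mutex_lock() matches above), but the
 * failsafe path skips handler dispatch and services the access straight
 * from the virtual register file. */
#include <pthread.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

struct vgpu_model {
	bool failsafe;			/* set once emulation hits a fatal error */
	pthread_mutex_t vgpu_lock;
	uint8_t vreg[4096];		/* virtual register file, tiny for the demo */
};

static int failsafe_emulate_mmio_rw(struct vgpu_model *v, uint64_t off,
				    void *p_data, unsigned int bytes, bool read)
{
	if (!v || !p_data)
		return -1;
	pthread_mutex_lock(&v->vgpu_lock);
	if (read)
		memcpy(p_data, &v->vreg[off], bytes);
	else
		memcpy(&v->vreg[off], p_data, bytes);
	pthread_mutex_unlock(&v->vgpu_lock);
	return 0;
}

static int emulate_mmio_read(struct vgpu_model *v, uint64_t off,
			     void *p_data, unsigned int bytes)
{
	if (v->failsafe)
		return failsafe_emulate_mmio_rw(v, off, p_data, bytes, true);

	pthread_mutex_lock(&v->vgpu_lock);
	/* The kernel looks up and runs a per-register handler here;
	 * a raw vreg copy stands in for that dispatch. */
	memcpy(p_data, &v->vreg[off], bytes);
	pthread_mutex_unlock(&v->vgpu_lock);
	return 0;
}

int main(void)
{
	struct vgpu_model v = { .vgpu_lock = PTHREAD_MUTEX_INITIALIZER };
	uint32_t val = 0;

	v.vreg[0x100] = 0xab;
	emulate_mmio_read(&v, 0x100, &val, sizeof(val));
	printf("val = %#x\n", val);
	return 0;
}
```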
|
aperture_gm.c
    89:  ret = alloc_gm(vgpu, true);  in alloc_vgpu_gm()
    94:  vgpu_aperture_offset(vgpu), vgpu_aperture_sz(vgpu));  in alloc_vgpu_gm()
    97:  vgpu_hidden_offset(vgpu), vgpu_hidden_sz(vgpu));  in alloc_vgpu_gm()
    179: _clear_vgpu_fence(vgpu);  in free_vgpu_fence()
    211: _clear_vgpu_fence(vgpu);  in alloc_vgpu_fence()
    308: free_vgpu_gm(vgpu);  in intel_vgpu_free_resource()
    309: free_vgpu_fence(vgpu);  in intel_vgpu_free_resource()
    310: free_resource(vgpu);  in intel_vgpu_free_resource()
    326: _clear_vgpu_fence(vgpu);  in intel_vgpu_reset_resource()
    361: free_vgpu_gm(vgpu);  in intel_vgpu_alloc_resource()
    [all …]
|
mpt.h
    88:  return intel_gvt_host.mpt->attach_vgpu(vgpu, &vgpu->handle);  in intel_gvt_hypervisor_attach_vgpu()
    104: intel_gvt_host.mpt->detach_vgpu(vgpu);  in intel_gvt_hypervisor_detach_vgpu()
    136: trace_inject_msi(vgpu->id, addr, data);  in intel_gvt_hypervisor_inject_msi()
    165: struct intel_vgpu *vgpu, unsigned long gfn)  in intel_gvt_hypervisor_enable_page_track()
    179: struct intel_vgpu *vgpu, unsigned long gfn)  in intel_gvt_hypervisor_disable_page_track()
    225: struct intel_vgpu *vgpu, unsigned long gfn)  in intel_gvt_hypervisor_gfn_to_mfn()
    254: struct intel_vgpu *vgpu, dma_addr_t dma_addr)  in intel_gvt_hypervisor_dma_unmap_guest_page()
    286: struct intel_vgpu *vgpu, unsigned long gfn,  in intel_gvt_hypervisor_map_gfn_to_mfn()
    330: return intel_gvt_host.mpt->set_opregion(vgpu);  in intel_gvt_hypervisor_set_opregion()
    377: intel_gvt_host.mpt->put_vfio_device(vgpu);  in intel_gvt_hypervisor_put_vfio_device()
    [all …]
|
gtt.c
    843:  spt->vgpu = vgpu;  in ppgtt_alloc_spt()
    986:  struct intel_vgpu *vgpu = spt->vgpu;  in ppgtt_invalidate_pte()
    1003: struct intel_vgpu *vgpu = spt->vgpu;  in ppgtt_invalidate_spt()
    1309: struct intel_vgpu *vgpu = spt->vgpu;  in ppgtt_populate_spt()
    1353: struct intel_vgpu *vgpu = spt->vgpu;  in ppgtt_handle_guest_entry_removal()
    1398: struct intel_vgpu *vgpu = spt->vgpu;  in ppgtt_handle_guest_entry_add()
    1785: struct intel_vgpu *vgpu = mm->vgpu;  in invalidate_ppgtt_mm()
    1815: struct intel_vgpu *vgpu = mm->vgpu;  in shadow_ppgtt_mm()
    1864: mm->vgpu = vgpu;  in vgpu_alloc_mm()
    2069: struct intel_vgpu *vgpu = mm->vgpu;  in ppgtt_get_next_level_entry()
    [all …]
|
sched_policy.c
    55:  struct intel_vgpu *vgpu;
    80:  if (!vgpu || vgpu == vgpu->gvt->idle_vgpu)  in vgpu_update_timeslice()
    192: vgpu = vgpu_data->vgpu;  in find_busy_vgpu()
    200: vgpu = vgpu_data->vgpu;  in find_busy_vgpu()
    205: return vgpu;  in find_busy_vgpu()
    223: if (vgpu) {  in tbs_sched_func()
    320: data->vgpu = vgpu;  in tbs_sched_init_vgpu()
    411: ret = vgpu->gvt->scheduler.sched_ops->init_vgpu(vgpu);  in intel_vgpu_init_sched_policy()
    420: vgpu->gvt->scheduler.sched_ops->clean_vgpu(vgpu);  in intel_vgpu_clean_sched_policy()
    431: vgpu->gvt->scheduler.sched_ops->start_schedule(vgpu);  in intel_vgpu_start_schedule()
    [all …]
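The find_busy_vgpu() and tbs_sched_func() hits outline the time-based scheduler's pick step: scan the runnable vGPUs, skip the placeholder idle vGPU, and choose one with pending work. A userspace model of that loop, with a plain array standing in for the kernel's run queue and all types reduced to illustrative stubs:

```c
/* Model of the pick loop suggested by find_busy_vgpu(): skip NULL
 * slots and the idle vGPU, return the first vGPU with a pending
 * workload. The round-robin start index is an assumption. */
#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

struct vgpu_model {
	int id;
	bool has_workload;
	bool is_idle_vgpu;	/* placeholder that runs when no guest can */
};

static struct vgpu_model *find_busy_vgpu(struct vgpu_model **list,
					 size_t n, size_t start)
{
	for (size_t i = 0; i < n; i++) {
		struct vgpu_model *v = list[(start + i) % n];

		if (!v || v->is_idle_vgpu)
			continue;
		if (v->has_workload)
			return v;
	}
	return NULL;
}

int main(void)
{
	struct vgpu_model idle = { 0, false, true };
	struct vgpu_model a = { 1, false, false };
	struct vgpu_model b = { 2, true, false };
	struct vgpu_model *list[] = { &idle, &a, &b };

	struct vgpu_model *next = find_busy_vgpu(list, 3, 0);

	printf("next vgpu: %d\n", next ? next->id : -1); /* prints 2 */
	return 0;
}
```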
|
dmabuf.c
    61:  struct intel_vgpu *vgpu;  in vgpu_gem_get_pages()
    73:  vgpu = fb_info->obj->vgpu;  in vgpu_gem_get_pages()
    129: struct intel_vgpu *vgpu = obj->vgpu;  in vgpu_gem_put_pages()
    133: vgpu_unpin_dma_address(vgpu,  in vgpu_gem_put_pages()
    145: struct intel_vgpu *vgpu = obj->vgpu;  in dmabuf_gem_object_free()
    149: if (vgpu && vgpu->active && !list_empty(&vgpu->dmabuf_obj_list_head)) {  in dmabuf_gem_object_free()
    186: struct intel_vgpu *vgpu = obj->vgpu;  in vgpu_gem_release()
    188: if (vgpu) {  in vgpu_gem_release()
    265: struct intel_vgpu *vgpu,  in vgpu_get_plane_info()
    483: dmabuf_obj->vgpu = vgpu;  in intel_vgpu_query_plane()
    [all …]
|
kvmgt.c
    399:  new->vgpu = vgpu;  in __gvt_cache_add()
    809:  ret = vgpu == NULL ? -EFAULT : PTR_ERR(vgpu);  in intel_vgpu_create()
    844:  struct intel_vgpu *vgpu = vdev->vgpu;  in intel_vgpu_iommu_notifier()
    1004: vgpu->handle = 0;  in __intel_vgpu_release()
    1341: vgpu_aperture_offset(vgpu) + vgpu_aperture_sz(vgpu))  in intel_vgpu_mmap()
    1939: info->vgpu = vgpu;  in kvmgt_guest_init()
    1975: if (!vgpu->vdev)  in kvmgt_attach_vgpu()
    1978: kvmgt_vdev(vgpu)->vgpu = vgpu;  in kvmgt_attach_vgpu()
    2013: vgpu = info->vgpu;  in kvmgt_inject_msi()
    2062: vgpu = ((struct kvmgt_guest_info *)handle)->vgpu;  in kvmgt_dma_map_guest_page()
    [all …]
|
interrupt.c
    182: trace_write_ir(vgpu->id, "IMR", reg, imr, vgpu_vreg(vgpu, reg),  in intel_vgpu_reg_imr_handler()
    185: vgpu_vreg(vgpu, reg) = imr;  in intel_vgpu_reg_imr_handler()
    187: ops->check_pending_irq(vgpu);  in intel_vgpu_reg_imr_handler()
    224: vgpu_vreg(vgpu, reg) |= ier;  in intel_vgpu_reg_master_irq_handler()
    253: trace_write_ir(vgpu->id, "IER", reg, ier, vgpu_vreg(vgpu, reg),  in intel_vgpu_reg_ier_handler()
    256: vgpu_vreg(vgpu, reg) = ier;  in intel_vgpu_reg_ier_handler()
    291: trace_write_ir(vgpu->id, "IIR", reg, iir, vgpu_vreg(vgpu, reg),  in intel_vgpu_reg_iir_handler()
    331: u32 val = vgpu_vreg(vgpu,  in update_upstream_irq()
    333: & vgpu_vreg(vgpu,  in update_upstream_irq()
    371: vgpu_vreg(vgpu, iir) |= (set_bits & ~vgpu_vreg(vgpu, imr));  in update_upstream_irq()
    [all …]
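The match at line 371 is the core of the virtual interrupt plumbing: a newly raised bit reaches the virtual IIR only where the virtual IMR does not mask it. A minimal model of just that rule, with a plain array in place of the vGPU's virtual register file:

```c
/* Minimal model of the IIR update rule visible in update_upstream_irq():
 * raised bits latch into the virtual IIR only where the virtual IMR
 * leaves them unmasked. The real code goes through vgpu_vreg(). */
#include <stdint.h>
#include <stdio.h>

enum { IMR, IER, IIR };
static uint32_t vreg[3];

static void raise_bits(uint32_t set_bits)
{
	vreg[IIR] |= set_bits & ~vreg[IMR];	/* masked bits never latch */
}

int main(void)
{
	vreg[IMR] = 0x0000000f;			/* mask the low four sources */
	raise_bits(0x000000ff);
	printf("IIR = %#x\n", vreg[IIR]);	/* 0xf0: only unmasked bits latch */
	return 0;
}
```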
|
execlist.c
    93:  struct intel_vgpu *vgpu = execlist->vgpu;  in emulate_execlist_status()
    128: struct intel_vgpu *vgpu = execlist->vgpu;  in emulate_csb_update()
    162: intel_gvt_hypervisor_write_gpa(vgpu,  in emulate_csb_update()
    184: struct intel_vgpu *vgpu = execlist->vgpu;  in emulate_execlist_ctx_schedule_out()
    257: struct intel_vgpu *vgpu = execlist->vgpu;  in get_next_execlist_slot()
    282: struct intel_vgpu *vgpu = execlist->vgpu;  in emulate_execlist_schedule_in()
    372: struct intel_vgpu *vgpu = workload->vgpu;  in prepare_execlist_workload()
    394: struct intel_vgpu *vgpu = workload->vgpu;  in complete_execlist_workload()
    510: execlist->vgpu = vgpu;  in init_vgpu_execlist()
    543: init_vgpu_execlist(vgpu, engine);  in reset_execlist()
    [all …]
|
handlers.c
    192:  vgpu->failsafe = true;  in enter_failsafe_mode()
    492:  vgpu->id, port_name(port), vgpu_vreg_t(vgpu, SPLL_CTL));  in bdw_vgpu_get_dp_bitrate()
    530:  vgpu->id, port_name(port), vgpu_vreg_t(vgpu, PORT_CLK_SEL(port)));  in bdw_vgpu_get_dp_bitrate()
    1014: vgpu_vreg_t(vgpu, DSPSURFLIVE(pipe)) = vgpu_vreg(vgpu, offset);  in pri_surf_mmio_write()
    1036: vgpu_vreg_t(vgpu, SPRSURFLIVE(pipe)) = vgpu_vreg(vgpu, offset);  in spr_surf_mmio_write()
    1057: vgpu_vreg_t(vgpu, DSPSURFLIVE(pipe)) = vgpu_vreg(vgpu, offset);  in reg50080_mmio_write()
    1060: vgpu_vreg_t(vgpu, SPRSURFLIVE(pipe)) = vgpu_vreg(vgpu, offset);  in reg50080_mmio_write()
    1398: vgpu_vreg(vgpu, offset) = read_virtual_sbi_register(vgpu,  in sbi_data_mmio_read()
    1566: vgpu->id);  in pf_write()
    1623: vgpu->id);  in dma_ctrl_write()
    [all …]
|
page_track.c
    35:  struct intel_vgpu *vgpu, unsigned long gfn)  in intel_vgpu_find_page_track()
    37:  return radix_tree_lookup(&vgpu->page_track_tree, gfn);  in intel_vgpu_find_page_track()
    56:  track = intel_vgpu_find_page_track(vgpu, gfn);  in intel_vgpu_register_page_track()
    87:  track = radix_tree_delete(&vgpu->page_track_tree, gfn);  in intel_vgpu_unregister_page_track()
    90:  intel_gvt_hypervisor_disable_page_track(vgpu, gfn);  in intel_vgpu_unregister_page_track()
    108: track = intel_vgpu_find_page_track(vgpu, gfn);  in intel_vgpu_enable_page_track()
    115: ret = intel_gvt_hypervisor_enable_page_track(vgpu, gfn);  in intel_vgpu_enable_page_track()
    135: track = intel_vgpu_find_page_track(vgpu, gfn);  in intel_vgpu_disable_page_track()
    165: mutex_lock(&vgpu->vgpu_lock);  in intel_vgpu_page_track_handler()
    173: if (unlikely(vgpu->failsafe)) {  in intel_vgpu_page_track_handler()
    [all …]
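As the radix_tree_lookup()/radix_tree_delete() matches show, the page tracker keys a radix tree by guest frame number (gfn): register inserts a record, find looks it up on each write fault, unregister deletes it and drops the hypervisor write protection. A userspace model of that bookkeeping; a small linear table stands in for the radix tree, and the handler type and names are illustrative:

```c
/* Model of the page-track bookkeeping in page_track.c: one record per
 * write-protected gfn, consulted on every guest write fault. */
#include <stddef.h>
#include <stdio.h>

typedef int (*page_track_handler_t)(void *priv, unsigned long gpa,
				    void *data, int bytes);

struct page_track {
	unsigned long gfn;
	page_track_handler_t handler;
	void *priv;
	int used;
};

#define NR_TRACKED 64
static struct page_track tracks[NR_TRACKED];

static struct page_track *find_page_track(unsigned long gfn)
{
	for (size_t i = 0; i < NR_TRACKED; i++)
		if (tracks[i].used && tracks[i].gfn == gfn)
			return &tracks[i];
	return NULL;
}

static int register_page_track(unsigned long gfn,
			       page_track_handler_t handler, void *priv)
{
	if (find_page_track(gfn))
		return -1;	/* already registered; the kernel returns -EEXIST */
	for (size_t i = 0; i < NR_TRACKED; i++) {
		if (!tracks[i].used) {
			tracks[i] = (struct page_track){ gfn, handler, priv, 1 };
			return 0;
		}
	}
	return -1;
}

static int ppgtt_write(void *priv, unsigned long gpa, void *data, int bytes)
{
	printf("tracked write: gpa=%#lx bytes=%d\n", gpa, bytes);
	return 0;
}

int main(void)
{
	unsigned int val = 0;
	unsigned long gpa = (0x1234ul << 12) | 0x8;
	struct page_track *t;

	register_page_track(0x1234, ppgtt_write, NULL);

	/* Fault path: resolve the gfn from the gpa, then run the handler. */
	t = find_page_track(gpa >> 12);
	if (t)
		t->handler(t->priv, gpa, &val, sizeof(val));
	return 0;
}
```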
|
scheduler.c
    127:  struct intel_vgpu *vgpu = workload->vgpu;  in populate_shadow_context()
    361:  struct intel_vgpu *vgpu = workload->vgpu;  in copy_workload_to_ring_buffer()
    459:  struct intel_vgpu *vgpu = workload->vgpu;  in intel_gvt_workload_req_alloc()
    486:  struct intel_vgpu *vgpu = workload->vgpu;  in intel_gvt_scan_and_shadow_workload()
    685:  struct intel_vgpu *vgpu = workload->vgpu;  in intel_vgpu_shadow_mm_pin()
    737:  struct intel_vgpu *vgpu = workload->vgpu;  in prepare_workload()
    799:  struct intel_vgpu *vgpu = workload->vgpu;  in dispatch_workload()
    937:  struct intel_vgpu *vgpu = workload->vgpu;  in update_guest_context()
    1069: struct intel_vgpu *vgpu = workload->vgpu;  in complete_current_workload()
    1200: vgpu = workload->vgpu;  in workload_thread()
    [all …]
|
fb_decoder.c
    187: if (pipe_is_enabled(vgpu, i))  in get_active_pipe()
    209: pipe = get_active_pipe(vgpu);  in intel_vgpu_decode_primary_plane()
    213: val = vgpu_vreg_t(vgpu, DSPCNTR(pipe));  in intel_vgpu_decode_primary_plane()
    270: val = vgpu_vreg_t(vgpu, DSPTILEOFF(pipe));  in intel_vgpu_decode_primary_plane()
    340: pipe = get_active_pipe(vgpu);  in intel_vgpu_decode_cursor_plane()
    344: val = vgpu_vreg_t(vgpu, CURCNTR(pipe));  in intel_vgpu_decode_cursor_plane()
    381: val = vgpu_vreg_t(vgpu, CURPOS(pipe));  in intel_vgpu_decode_cursor_plane()
    419: pipe = get_active_pipe(vgpu);  in intel_vgpu_decode_sprite_plane()
    423: val = vgpu_vreg_t(vgpu, SPRCTL(pipe));  in intel_vgpu_decode_sprite_plane()
    488: val = vgpu_vreg_t(vgpu, SPRSIZE(pipe));  in intel_vgpu_decode_sprite_plane()
    [all …]
|
debugfs.c
    29:  struct intel_vgpu *vgpu;
    86:  struct intel_vgpu *vgpu = s->private;  in vgpu_mmio_diff_show()
    87:  struct intel_gvt *gvt = vgpu->gvt;  in vgpu_mmio_diff_show()
    89:  .vgpu = vgpu,  in vgpu_mmio_diff_show()
    131: *val = vgpu->scan_nonprivbb;  in vgpu_scan_nonprivbb_get()
    146: vgpu->scan_nonprivbb = val;  in vgpu_scan_nonprivbb_set()
    163: vgpu->debugfs = debugfs_create_dir(name, vgpu->gvt->debugfs_root);  in intel_gvt_debugfs_add_vgpu()
    165: debugfs_create_bool("active", 0444, vgpu->debugfs, &vgpu->active);  in intel_gvt_debugfs_add_vgpu()
    166: debugfs_create_file("mmio_diff", 0444, vgpu->debugfs, vgpu,  in intel_gvt_debugfs_add_vgpu()
    168: debugfs_create_file("scan_nonprivbb", 0644, vgpu->debugfs, vgpu,  in intel_gvt_debugfs_add_vgpu()
    [all …]
|
opregion.c
    231: if (!vgpu_opregion(vgpu)->va) {  in intel_vgpu_init_opregion()
    237: buf = (u8 *)vgpu_opregion(vgpu)->va;  in intel_vgpu_init_opregion()
    271: vgpu_opregion(vgpu)->gfn[i],  in map_vgpu_opregion()
    280: vgpu_opregion(vgpu)->mapped = map;  in map_vgpu_opregion()
    311: if (vgpu_opregion(vgpu)->mapped)  in intel_vgpu_opregion_base_write_handler()
    312: map_vgpu_opregion(vgpu, false);  in intel_vgpu_opregion_base_write_handler()
    336: if (!vgpu_opregion(vgpu)->va)  in intel_vgpu_clean_opregion()
    340: if (vgpu_opregion(vgpu)->mapped)  in intel_vgpu_clean_opregion()
    341: map_vgpu_opregion(vgpu, false);  in intel_vgpu_clean_opregion()
    348: vgpu_opregion(vgpu)->va = NULL;  in intel_vgpu_clean_opregion()
    [all …]
|
display.h
    47: #define intel_vgpu_port(vgpu, port) \
    48:     (&(vgpu->display.ports[port]))
    50: #define intel_vgpu_has_monitor_on_port(vgpu, port) \
    51:     (intel_vgpu_port(vgpu, port)->edid && \
    52:      intel_vgpu_port(vgpu, port)->edid->data_valid)
    54: #define intel_vgpu_port_is_dp(vgpu, port) \
    55:     ((intel_vgpu_port(vgpu, port)->type == GVT_DP_A) || \
    56:      (intel_vgpu_port(vgpu, port)->type == GVT_DP_B) || \
    57:      (intel_vgpu_port(vgpu, port)->type == GVT_DP_C) || \
    58:      (intel_vgpu_port(vgpu, port)->type == GVT_DP_D))
    [all …]
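The macro bodies above are complete, so they can be rendered as a standalone program. The structs and any enum value other than GVT_DP_A..GVT_DP_D are simplified stand-ins; the macro logic follows the matches verbatim:

```c
/* Standalone rendering of the display.h port helpers: a port has a
 * monitor when its EDID block exists and is valid, and it is a DP
 * port when its type is one of the four DP variants. */
#include <stdbool.h>
#include <stdio.h>

enum port_type { GVT_CRT, GVT_DP_A, GVT_DP_B, GVT_DP_C, GVT_DP_D, GVT_HDMI_A };

struct intel_vgpu_edid_data { bool data_valid; };
struct intel_vgpu_port {
	enum port_type type;
	struct intel_vgpu_edid_data *edid;
};
struct vgpu_display { struct intel_vgpu_port ports[8]; };
struct vgpu_model { struct vgpu_display display; };

#define intel_vgpu_port(v, p) (&((v)->display.ports[p]))
#define intel_vgpu_has_monitor_on_port(v, p) \
	(intel_vgpu_port(v, p)->edid && intel_vgpu_port(v, p)->edid->data_valid)
#define intel_vgpu_port_is_dp(v, p) \
	((intel_vgpu_port(v, p)->type == GVT_DP_A) || \
	 (intel_vgpu_port(v, p)->type == GVT_DP_B) || \
	 (intel_vgpu_port(v, p)->type == GVT_DP_C) || \
	 (intel_vgpu_port(v, p)->type == GVT_DP_D))

int main(void)
{
	struct intel_vgpu_edid_data edid = { .data_valid = true };
	struct vgpu_model v = { 0 };

	v.display.ports[0] = (struct intel_vgpu_port){ GVT_DP_B, &edid };
	printf("monitor: %d, dp: %d\n",
	       intel_vgpu_has_monitor_on_port(&v, 0),
	       intel_vgpu_port_is_dp(&v, 0));	/* 1, 1 */
	return 0;
}
```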
|
gtt.h
    63:  struct intel_vgpu *vgpu);
    69:  struct intel_vgpu *vgpu);
    153: struct intel_vgpu *vgpu;
    221: int intel_vgpu_init_gtt(struct intel_vgpu *vgpu);
    222: void intel_vgpu_clean_gtt(struct intel_vgpu *vgpu);
    224: void intel_vgpu_invalidate_ppgtt(struct intel_vgpu *vgpu);
    227: void intel_vgpu_reset_gtt(struct intel_vgpu *vgpu);
    247: struct intel_vgpu *vgpu;
    269: int intel_vgpu_sync_oos_pages(struct intel_vgpu *vgpu);
    271: int intel_vgpu_flush_post_shadow(struct intel_vgpu *vgpu);
    [all …]
|
sched_policy.h
    43: int (*init_vgpu)(struct intel_vgpu *vgpu);
    44: void (*clean_vgpu)(struct intel_vgpu *vgpu);
    45: void (*start_schedule)(struct intel_vgpu *vgpu);
    46: void (*stop_schedule)(struct intel_vgpu *vgpu);
    55: int intel_vgpu_init_sched_policy(struct intel_vgpu *vgpu);
    57: void intel_vgpu_clean_sched_policy(struct intel_vgpu *vgpu);
    59: void intel_vgpu_start_schedule(struct intel_vgpu *vgpu);
    61: void intel_vgpu_stop_schedule(struct intel_vgpu *vgpu);
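Lines 43-46 are the members of the scheduling-policy ops table that the sched_policy.c matches invoke through sched_ops. A standalone sketch of that wiring; struct intel_vgpu is reduced to a stub, the "tbs" (time-based scheduling) name matches the policy seen in sched_policy.c, but the stub bodies are illustrative only:

```c
/* Ops-table pattern behind sched_policy.h: callers never touch a
 * policy directly, only its function-pointer table. */
#include <stdio.h>

struct intel_vgpu { int id; };

struct intel_gvt_sched_policy_ops {
	int  (*init_vgpu)(struct intel_vgpu *vgpu);
	void (*clean_vgpu)(struct intel_vgpu *vgpu);
	void (*start_schedule)(struct intel_vgpu *vgpu);
	void (*stop_schedule)(struct intel_vgpu *vgpu);
};

static int tbs_sched_init_vgpu(struct intel_vgpu *vgpu)
{
	printf("vgpu%d: allocate per-vGPU scheduler data\n", vgpu->id);
	return 0;
}

static void tbs_sched_clean_vgpu(struct intel_vgpu *vgpu)
{
	printf("vgpu%d: free per-vGPU scheduler data\n", vgpu->id);
}

static void tbs_sched_start(struct intel_vgpu *vgpu)
{
	printf("vgpu%d: mark runnable\n", vgpu->id);
}

static void tbs_sched_stop(struct intel_vgpu *vgpu)
{
	printf("vgpu%d: remove from run queue\n", vgpu->id);
}

static const struct intel_gvt_sched_policy_ops tbs_schedule_ops = {
	.init_vgpu      = tbs_sched_init_vgpu,
	.clean_vgpu     = tbs_sched_clean_vgpu,
	.start_schedule = tbs_sched_start,
	.stop_schedule  = tbs_sched_stop,
};

int main(void)
{
	struct intel_vgpu vgpu = { .id = 1 };

	tbs_schedule_ops.init_vgpu(&vgpu);
	tbs_schedule_ops.start_schedule(&vgpu);
	tbs_schedule_ops.stop_schedule(&vgpu);
	tbs_schedule_ops.clean_vgpu(&vgpu);
	return 0;
}
```

The indirection lets intel_vgpu_init_sched_policy() and friends stay policy-agnostic; swapping schedulers means swapping one ops table.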
|
scheduler.h
    86:  struct intel_vgpu *vgpu;
    136: #define workload_q_head(vgpu, e) \
    137:     (&(vgpu)->submission.workload_q_head[(e)->id])
    145: void intel_gvt_wait_vgpu_idle(struct intel_vgpu *vgpu);
    147: int intel_vgpu_setup_submission(struct intel_vgpu *vgpu);
    149: void intel_vgpu_reset_submission(struct intel_vgpu *vgpu,
    152: void intel_vgpu_clean_submission(struct intel_vgpu *vgpu);
    154: int intel_vgpu_select_submission_ops(struct intel_vgpu *vgpu,
    162: intel_vgpu_create_workload(struct intel_vgpu *vgpu,
    168: void intel_vgpu_clean_workloads(struct intel_vgpu *vgpu,
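The workload_q_head() macro implies one workload queue per engine per vGPU, indexed by engine id. A userspace model of that layout; the queue is a trivial singly linked list rather than the kernel's struct list_head, and NR_ENGINES is an illustrative constant:

```c
/* Per-engine workload queues implied by workload_q_head(): each vGPU's
 * submission state holds one queue head per engine, indexed by id. */
#include <stdio.h>
#include <stdlib.h>

#define NR_ENGINES 4

struct workload {
	int seqno;
	struct workload *next;
};

struct vgpu_submission {
	struct workload *workload_q_head[NR_ENGINES];
};

struct vgpu_model { struct vgpu_submission submission; };
struct engine_model { int id; };

#define workload_q_head(v, e) (&(v)->submission.workload_q_head[(e)->id])

static void queue_workload(struct vgpu_model *v, struct engine_model *e,
			   int seqno)
{
	struct workload *w = malloc(sizeof(*w));
	struct workload **q = workload_q_head(v, e);

	if (!w)
		return;
	w->seqno = seqno;
	w->next = *q;	/* push-front for brevity; the kernel appends */
	*q = w;
}

int main(void)
{
	struct vgpu_model v = { 0 };
	struct engine_model rcs = { .id = 0 };

	queue_workload(&v, &rcs, 1);
	printf("engine %d head seqno: %d\n", rcs.id,
	       (*workload_q_head(&v, &rcs))->seqno);
	return 0;
}
```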
|
mmio.h
    86:  int intel_vgpu_init_mmio(struct intel_vgpu *vgpu);
    87:  void intel_vgpu_reset_mmio(struct intel_vgpu *vgpu, bool dmlr);
    88:  void intel_vgpu_clean_mmio(struct intel_vgpu *vgpu);
    90:  int intel_vgpu_gpa_to_mmio_offset(struct intel_vgpu *vgpu, u64 gpa);
    92:  int intel_vgpu_emulate_mmio_read(struct intel_vgpu *vgpu, u64 pa,
    94:  int intel_vgpu_emulate_mmio_write(struct intel_vgpu *vgpu, u64 pa,
    97:  int intel_vgpu_default_mmio_read(struct intel_vgpu *vgpu, unsigned int offset,
    99:  int intel_vgpu_default_mmio_write(struct intel_vgpu *vgpu, unsigned int offset,
    105: int intel_vgpu_mmio_reg_rw(struct intel_vgpu *vgpu, unsigned int offset,
    108: int intel_vgpu_mask_mmio_write(struct intel_vgpu *vgpu, unsigned int offset,
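Note the two address domains in these prototypes: the emulate entry points take a guest physical address (u64 pa), while the handlers take a register offset. intel_vgpu_gpa_to_mmio_offset() bridges them, presumably by subtracting the guest's MMIO BAR base as read from the virtual config space. A sketch of that translation under that assumption; the BAR base and size constants are made up:

```c
/* Sketch of the gpa-to-offset translation implied by
 * intel_vgpu_gpa_to_mmio_offset(). The BAR values are illustrative;
 * the kernel reads the real ones from the vGPU's virtual PCI config. */
#include <stdint.h>
#include <stdio.h>

#define GUEST_MMIO_BAR_BASE 0xf0000000ull	/* illustrative BAR0 value */
#define GUEST_MMIO_BAR_SIZE (16ull << 20)

static long gpa_to_mmio_offset(uint64_t gpa)
{
	if (gpa < GUEST_MMIO_BAR_BASE ||
	    gpa >= GUEST_MMIO_BAR_BASE + GUEST_MMIO_BAR_SIZE)
		return -1;	/* not an MMIO access on this BAR */
	return (long)(gpa - GUEST_MMIO_BAR_BASE);
}

int main(void)
{
	printf("offset = %ld\n", gpa_to_mmio_offset(0xf0002000ull)); /* 8192 (0x2000) */
	printf("offset = %ld\n", gpa_to_mmio_offset(0x1000ull));     /* -1: outside BAR */
	return 0;
}
```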
|