/linux/drivers/gpu/drm/msm/

msm_gem_vma.c
  18   if (aspace->mmu)  in msm_gem_address_space_destroy()
  19   aspace->mmu->funcs->destroy(aspace->mmu);  in msm_gem_address_space_destroy()
  21   kfree(aspace);  in msm_gem_address_space_destroy()
  27   if (aspace)  in msm_gem_address_space_put()
  37   return aspace;  in msm_gem_address_space_get()
  54   if (aspace->mmu)  in msm_gem_purge_vma()
  55   aspace->mmu->funcs->unmap(aspace->mmu, vma->iova, size);  in msm_gem_purge_vma()
  87   if (aspace && aspace->mmu)  in msm_gem_map_vma()
  88   ret = aspace->mmu->funcs->map(aspace->mmu, vma->iova, sgt,  in msm_gem_map_vma()
  151  aspace = kzalloc(sizeof(*aspace), GFP_KERNEL);  in msm_gem_address_space_create()
  [all …]

msm_gem.c
  310  vma->aspace = aspace;  in add_vma()
  326  if (vma->aspace == aspace)  in lookup_vma()
  357  if (vma->aspace) {  in put_iova_spaces()
  388  vma = lookup_vma(obj, aspace);  in get_iova_locked()
  391  vma = add_vma(obj, aspace);  in get_iova_locked()
  429  vma = lookup_vma(obj, aspace);  in msm_gem_pin_iova()
  522  vma = lookup_vma(obj, aspace);  in msm_gem_iova()
  935  if (vma->aspace) {  in msm_gem_describe()
  936  struct msm_gem_address_space *aspace = vma->aspace;  in msm_gem_describe() (local)
  944  name = aspace->name;  in msm_gem_describe()
  [all …]

msm_gem.h
  43   struct msm_gem_address_space *aspace;  (member)
  111  struct msm_gem_address_space *aspace, uint64_t *iova);
  113  struct msm_gem_address_space *aspace, uint64_t *iova,
  116  struct msm_gem_address_space *aspace, uint64_t *iova);
  118  struct msm_gem_address_space *aspace, uint64_t *iova);
  120  struct msm_gem_address_space *aspace);
  122  struct msm_gem_address_space *aspace);
  124  struct msm_gem_address_space *aspace);
  147  uint32_t flags, struct msm_gem_address_space *aspace,
  150  struct msm_gem_address_space *aspace);
  [all …]

msm_gpu.c
  491  gpu->aspace->mmu->funcs->resume_translation(gpu->aspace->mmu);  in fault_worker()
  815  if (!IS_ERR(aspace))  in msm_gpu_create_private_address_space()
  816  aspace->pid = get_pid(task_pid(task));  in msm_gpu_create_private_address_space()
  819  if (IS_ERR_OR_NULL(aspace))  in msm_gpu_create_private_address_space()
  820  aspace = msm_gem_address_space_get(gpu->aspace);  in msm_gpu_create_private_address_space()
  822  return aspace;  in msm_gpu_create_private_address_space()
  910  if (gpu->aspace == NULL)  in msm_gpu_init()
  912  else if (IS_ERR(gpu->aspace)) {  in msm_gpu_init()
  913  ret = PTR_ERR(gpu->aspace);  in msm_gpu_init()
  982  if (!IS_ERR_OR_NULL(gpu->aspace)) {  in msm_gpu_cleanup()
  [all …]

msm_drv.h
  270  int msm_gem_init_vma(struct msm_gem_address_space *aspace,
  273  void msm_gem_purge_vma(struct msm_gem_address_space *aspace,
  275  void msm_gem_unmap_vma(struct msm_gem_address_space *aspace,
  277  int msm_gem_map_vma(struct msm_gem_address_space *aspace,
  280  void msm_gem_close_vma(struct msm_gem_address_space *aspace,
  285  msm_gem_address_space_get(struct msm_gem_address_space *aspace);
  287  void msm_gem_address_space_put(struct msm_gem_address_space *aspace);
  317  struct msm_gem_address_space *aspace);
  319  struct msm_gem_address_space *aspace);
  321  struct msm_gem_address_space *aspace, int plane);

msm_fb.c
  57   struct msm_gem_address_space *aspace)  in msm_framebuffer_prepare() (argument)
  63   ret = msm_gem_get_and_pin_iova(fb->obj[i], aspace, &iova);  in msm_framebuffer_prepare()
  73   struct msm_gem_address_space *aspace)  in msm_framebuffer_cleanup() (argument)
  78   msm_gem_unpin_iova(fb->obj[i], aspace);  in msm_framebuffer_cleanup()
  82   struct msm_gem_address_space *aspace, int plane)  in msm_framebuffer_iova() (argument)
  86   return msm_gem_iova(fb->obj[plane], aspace) + fb->offsets[plane];  in msm_framebuffer_iova()

msm_ringbuffer.c
  70   gpu->aspace, &ring->bo, &ring->iova);  in msm_ringbuffer_new()
  121  msm_gem_kernel_put(ring->bo, ring->gpu->aspace);  in msm_ringbuffer_destroy()

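The msm_fb.c hits above show the usual way display code uses an address space: pin every plane's BO into the aspace before scanout, query the resulting iova, and unpin against the same aspace on cleanup. A minimal sketch of that prepare/cleanup pattern, assuming it sits inside drivers/gpu/drm/msm and using only the calls listed above (pin_fb_planes/unpin_fb_planes are hypothetical wrapper names):

```c
#include <drm/drm_framebuffer.h>
#include "msm_drv.h"	/* msm_gem_get_and_pin_iova(), msm_gem_unpin_iova() */

/* Sketch of the msm_framebuffer_prepare()-style loop listed above. */
static int pin_fb_planes(struct drm_framebuffer *fb,
			 struct msm_gem_address_space *aspace)
{
	int i, n = fb->format->num_planes;
	uint64_t iova;

	for (i = 0; i < n; i++) {
		/* Maps the BO into this aspace (if needed) and takes a pin. */
		int ret = msm_gem_get_and_pin_iova(fb->obj[i], aspace, &iova);

		if (ret) {
			/* Unwind the planes that were already pinned. */
			while (--i >= 0)
				msm_gem_unpin_iova(fb->obj[i], aspace);
			return ret;
		}
	}
	return 0;
}

/* Sketch of the matching msm_framebuffer_cleanup()-style loop. */
static void unpin_fb_planes(struct drm_framebuffer *fb,
			    struct msm_gem_address_space *aspace)
{
	int i;

	for (i = 0; i < fb->format->num_planes; i++)
		msm_gem_unpin_iova(fb->obj[i], aspace);
}
```

Once pinned, per-plane device addresses come from msm_framebuffer_iova(fb, aspace, plane), which (per msm_fb.c:86 above) already adds fb->offsets[plane].
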
/linux/drivers/vme/bridges/

vme_fake.c
  212  bridge->slaves[i].aspace = aspace;  in fake_slave_set()
  240  *aspace = bridge->slaves[i].aspace;  in fake_slave_get()
  320  bridge->masters[i].aspace = aspace;  in fake_master_set()
  352  *aspace = bridge->masters[i].aspace;  in __fake_master_get()
  430  if (aspace != bridge->slaves[i].aspace)  in fake_vmeread8()
  460  if (aspace != bridge->slaves[i].aspace)  in fake_vmeread16()
  493  if (aspace != bridge->slaves[i].aspace)  in fake_vmeread32()
  535  aspace = priv->masters[i].aspace;  in fake_master_read()
  624  if (aspace != bridge->slaves[i].aspace)  in fake_vmewrite8()
  728  aspace = bridge->masters[i].aspace;  in fake_master_write()
  [all …]

vme_tsi148.c
  489   switch (aspace) {  in tsi148_slave_set()
  655   *aspace = 0;  in tsi148_slave_get()
  663   *aspace |= VME_A16;  in tsi148_slave_get()
  667   *aspace |= VME_A24;  in tsi148_slave_get()
  671   *aspace |= VME_A32;  in tsi148_slave_get()
  675   *aspace |= VME_A64;  in tsi148_slave_get()
  964   switch (aspace) {  in tsi148_master_set()
  1088  *aspace = 0;  in __tsi148_master_get()
  1473  switch (aspace) {  in tsi148_dma_set_vme_src_attributes()
  1572  switch (aspace) {  in tsi148_dma_set_vme_dest_attributes()
  [all …]

vme_ca91cx42.c
  349  switch (aspace) {  in ca91cx42_slave_set()
  467  *aspace = 0;  in ca91cx42_slave_get()
  474  *aspace = VME_A16;  in ca91cx42_slave_get()
  476  *aspace = VME_A24;  in ca91cx42_slave_get()
  478  *aspace = VME_A32;  in ca91cx42_slave_get()
  686  switch (aspace) {  in ca91cx42_master_set()
  768  *aspace = 0;  in __ca91cx42_master_get()
  778  *aspace = VME_A16;  in __ca91cx42_master_get()
  781  *aspace = VME_A24;  in __ca91cx42_master_get()
  784  *aspace = VME_A32;  in __ca91cx42_master_get()
  [all …]

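In these bridge drivers, aspace carries the generic VME_A16/A24/A32/A64 flags; the *_slave_get()/*_master_get() hits above rebuild that value from a window's control register. A rough sketch of such a decode, with a deliberately invented register field (the CTL_AS_* encodings below are illustrative only, not real TSI148 or CA91CX42 bits):

```c
#include <linux/types.h>
#include <linux/vme.h>	/* VME_A16/A24/A32/A64 address-space flags */

/* Hypothetical address-space field of a window control register. */
#define CTL_AS_MASK	0x7
#define CTL_AS_A16	0x0
#define CTL_AS_A24	0x1
#define CTL_AS_A32	0x2
#define CTL_AS_A64	0x3

/* Translate the hardware encoding back into the generic VME_A* flags. */
static void decode_window_aspace(u32 ctl, u32 *aspace)
{
	*aspace = 0;

	switch (ctl & CTL_AS_MASK) {
	case CTL_AS_A16:
		*aspace |= VME_A16;
		break;
	case CTL_AS_A24:
		*aspace |= VME_A24;
		break;
	case CTL_AS_A32:
		*aspace |= VME_A32;
		break;
	case CTL_AS_A64:
		*aspace |= VME_A64;
		break;
	}
}
```

The *_set() paths run the same mapping in the opposite direction: switch on the requested VME_A* flag and program the corresponding hardware encoding.
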
/linux/drivers/vme/

vme.c
  167   u32 aspace, cycle, dwidth;  in vme_get_size() (local)
  201   switch (aspace) {  in vme_check_window()
  381   if (!(((image->address_attr & aspace) == aspace) &&  in vme_slave_set()
  392   aspace, cycle);  in vme_slave_set()
  431   aspace, cycle);  in vme_slave_get()
  585   if (!(((image->address_attr & aspace) == aspace) &&  in vme_master_set()
  1065  vme_attr->aspace = aspace;  in vme_dma_vme_attribute()
  1256  if ((aspace == handler->aspace) &&  in vme_bus_error_handler()
  1284  handler->aspace = aspace;  in vme_register_error_handler()
  1564  u32 aspace, u32 cycle)  in vme_lm_set() (argument)
  [all …]

vme_bridge.h
  49   u32 aspace;  (member)
  86   u32 aspace; /* Address space of error window*/  (member)
  186  struct vme_bridge *bridge, u32 aspace,

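The vme_slave_set() and vme_master_set() hits gate a request on the window's capabilities: every requested address-space flag must already be present in image->address_attr (the same shape of test is applied to cycle types). The check reduces to a subset test; here is a small sketch (vme_window_supports() is a hypothetical helper written for illustration, not part of the core VME API):

```c
#include <linux/types.h>
#include <linux/vme.h>

/*
 * Mirrors the (image->address_attr & aspace) == aspace test seen in
 * vme_slave_set()/vme_master_set(): valid only if the window supports
 * every requested flag.
 */
static bool vme_window_supports(u32 supported, u32 requested)
{
	return (supported & requested) == requested;
}

/*
 * Example: a window advertising VME_A24 | VME_A32 accepts an A32
 * request but rejects an A64 one.
 */
static bool example_check(void)
{
	u32 address_attr = VME_A24 | VME_A32;

	return vme_window_supports(address_attr, VME_A32) &&	/* true */
	       !vme_window_supports(address_attr, VME_A64);	/* true */
}
```

The vme_bus_error_handler()/vme_register_error_handler() hits use the same aspace value again, this time to match a recorded bus error against the handler's window.
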
/linux/drivers/gpu/drm/msm/disp/mdp4/

mdp4_kms.c
  130  struct msm_gem_address_space *aspace = kms->aspace;  in mdp4_destroy() (local)
  133  msm_gem_unpin_iova(mdp4_kms->blank_cursor_bo, kms->aspace);  in mdp4_destroy()
  136  if (aspace) {  in mdp4_destroy()
  137  aspace->mmu->funcs->detach(aspace->mmu);  in mdp4_destroy()
  138  msm_gem_address_space_put(aspace);  in mdp4_destroy()
  399  struct msm_gem_address_space *aspace;  in mdp4_kms_init() (local)
  511  aspace = msm_gem_address_space_create(mmu,  in mdp4_kms_init()
  514  if (IS_ERR(aspace)) {  in mdp4_kms_init()
  517  ret = PTR_ERR(aspace);  in mdp4_kms_init()
  521  kms->aspace = aspace;  in mdp4_kms_init()
  [all …]

mdp4_plane.c
  105  msm_framebuffer_cleanup(fb, kms->aspace);  in mdp4_plane_cleanup_fb()
  156  msm_framebuffer_iova(fb, kms->aspace, 0));  in mdp4_plane_set_scanout()
  158  msm_framebuffer_iova(fb, kms->aspace, 1));  in mdp4_plane_set_scanout()
  160  msm_framebuffer_iova(fb, kms->aspace, 2));  in mdp4_plane_set_scanout()
  162  msm_framebuffer_iova(fb, kms->aspace, 3));  in mdp4_plane_set_scanout()

/linux/drivers/gpu/drm/msm/disp/mdp5/

mdp5_kms.c
  215  struct msm_gem_address_space *aspace = kms->aspace;  in mdp5_kms_destroy() (local)
  224  if (aspace) {  in mdp5_kms_destroy()
  225  aspace->mmu->funcs->detach(aspace->mmu);  in mdp5_kms_destroy()
  226  msm_gem_address_space_put(aspace);  in mdp5_kms_destroy()
  576  struct msm_gem_address_space *aspace;  in mdp5_kms_init() (local)
  629  aspace = msm_gem_address_space_create(mmu, "mdp5",  in mdp5_kms_init()
  632  if (IS_ERR(aspace)) {  in mdp5_kms_init()
  635  ret = PTR_ERR(aspace);  in mdp5_kms_init()
  639  kms->aspace = aspace;  in mdp5_kms_init()
  643  aspace = NULL;  in mdp5_kms_init()

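The mdp4_kms.c and mdp5_kms.c hits show the lifecycle a display KMS block gives its address space: wrap the display MMU with msm_gem_address_space_create() at init and store it in kms->aspace, then detach the MMU and drop the reference on teardown. A condensed sketch of that pattern, assuming the create(mmu, name, va_start, size) signature used above; the helper names and VA range are illustrative, not the driver's exact values:

```c
#include <linux/err.h>
#include <linux/sizes.h>
#include "msm_drv.h"	/* msm_gem_address_space_create()/put() */
#include "msm_gem.h"	/* struct msm_gem_address_space */
#include "msm_kms.h"	/* struct msm_kms (holds the aspace pointer) */

/* Init: wrap the display MMU in a GEM address space (sketch). */
static int kms_aspace_init(struct msm_kms *kms, struct msm_mmu *mmu)
{
	struct msm_gem_address_space *aspace;

	/* VA range below is illustrative only. */
	aspace = msm_gem_address_space_create(mmu, "mdp", SZ_4K, SZ_4G - SZ_4K);
	if (IS_ERR(aspace))
		return PTR_ERR(aspace);

	kms->aspace = aspace;
	return 0;
}

/* Teardown: detach the MMU, then drop the reference taken at create time. */
static void kms_aspace_fini(struct msm_kms *kms)
{
	struct msm_gem_address_space *aspace = kms->aspace;

	if (aspace) {
		aspace->mmu->funcs->detach(aspace->mmu);
		msm_gem_address_space_put(aspace);
		kms->aspace = NULL;
	}
}
```

The planes then route every scanout address through that same kms->aspace, as the mdp4_plane.c calls to msm_framebuffer_iova(fb, kms->aspace, plane) show.
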
/linux/drivers/gpu/drm/msm/adreno/

a5xx_preempt.c
  233  MSM_BO_WC | MSM_BO_MAP_PRIV, gpu->aspace, &bo, &iova);  in preempt_init_ring()
  241  MSM_BO_WC, gpu->aspace, &counters_bo, &counters_iova);  in preempt_init_ring()
  243  msm_gem_kernel_put(bo, gpu->aspace);  in preempt_init_ring()
  275  msm_gem_kernel_put(a5xx_gpu->preempt_bo[i], gpu->aspace);  in a5xx_preempt_fini()
  276  msm_gem_kernel_put(a5xx_gpu->preempt_counters_bo[i], gpu->aspace);  in a5xx_preempt_fini()

a2xx_gpu.c
  109  msm_gpummu_params(gpu->aspace->mmu, &pt_base, &tran_error);  in a2xx_hw_init()
  455  struct msm_gem_address_space *aspace;  in a2xx_create_address_space() (local)
  457  aspace = msm_gem_address_space_create(mmu, "gpu", SZ_16M,  in a2xx_create_address_space()
  460  if (IS_ERR(aspace) && !IS_ERR(mmu))  in a2xx_create_address_space()
  463  return aspace;  in a2xx_create_address_space()
  535  if (!gpu->aspace) {  in a2xx_gpu_init()

a6xx_gmu.c
  1137  msm_gem_kernel_put(gmu->hfi.obj, gmu->aspace);  in a6xx_gmu_memory_free()
  1138  msm_gem_kernel_put(gmu->debug.obj, gmu->aspace);  in a6xx_gmu_memory_free()
  1139  msm_gem_kernel_put(gmu->icache.obj, gmu->aspace);  in a6xx_gmu_memory_free()
  1140  msm_gem_kernel_put(gmu->dcache.obj, gmu->aspace);  in a6xx_gmu_memory_free()
  1141  msm_gem_kernel_put(gmu->dummy.obj, gmu->aspace);  in a6xx_gmu_memory_free()
  1142  msm_gem_kernel_put(gmu->log.obj, gmu->aspace);  in a6xx_gmu_memory_free()
  1144  gmu->aspace->mmu->funcs->detach(gmu->aspace->mmu);  in a6xx_gmu_memory_free()
  1145  msm_gem_address_space_put(gmu->aspace);  in a6xx_gmu_memory_free()
  1174  ret = msm_gem_get_and_pin_iova_range(bo->obj, gmu->aspace, &bo->iova,  in a6xx_gmu_memory_alloc()
  1198  if (IS_ERR(gmu->aspace)) {  in a6xx_gmu_memory_probe()
  [all …]

a6xx_gpu.c
  844   msm_gem_unpin_iova(a6xx_gpu->sqe_bo, gpu->aspace);  in a6xx_ucode_init()
  1069  gpu->aspace, &a6xx_gpu->shadow_bo,  in hw_init()
  1245  gpu->aspace->mmu->funcs->resume_translation(gpu->aspace->mmu);  in a6xx_fault_handler()
  1597  msm_gem_unpin_iova(a6xx_gpu->sqe_bo, gpu->aspace);  in a6xx_destroy()
  1660  struct msm_gem_address_space *aspace;  in a6xx_create_address_space() (local)
  1688  aspace = msm_gem_address_space_create(mmu, "gpu",  in a6xx_create_address_space()
  1691  if (IS_ERR(aspace) && !IS_ERR(mmu))  in a6xx_create_address_space()
  1694  return aspace;  in a6xx_create_address_space()
  1702  mmu = msm_iommu_pagetable_create(gpu->aspace->mmu);  in a6xx_create_private_address_space()
  1874  if (gpu->aspace)  in a6xx_gpu_init()
  [all …]

a5xx_gpu.c
  925   gpu->aspace, &a5xx_gpu->shadow_bo,  in a5xx_hw_init()
  1034  msm_gem_unpin_iova(a5xx_gpu->pm4_bo, gpu->aspace);  in a5xx_destroy()
  1039  msm_gem_unpin_iova(a5xx_gpu->pfp_bo, gpu->aspace);  in a5xx_destroy()
  1044  msm_gem_unpin_iova(a5xx_gpu->gpmu_bo, gpu->aspace);  in a5xx_destroy()
  1049  msm_gem_unpin_iova(a5xx_gpu->shadow_bo, gpu->aspace);  in a5xx_destroy()
  1438  SZ_1M, MSM_BO_WC, gpu->aspace,  in a5xx_crashdumper_init()
  1539  msm_gem_kernel_put(dumper.bo, gpu->aspace);  in a5xx_gpu_state_get_hlsq_regs()
  1547  msm_gem_kernel_put(dumper.bo, gpu->aspace);  in a5xx_gpu_state_get_hlsq_regs()
  1778  if (gpu->aspace)  in a5xx_gpu_init()
  1779  msm_mmu_set_fault_handler(gpu->aspace->mmu, gpu, a5xx_fault_handler);  in a5xx_gpu_init()

a5xx_debugfs.c
  119  msm_gem_unpin_iova(a5xx_gpu->pm4_bo, gpu->aspace);  in reset_set()
  125  msm_gem_unpin_iova(a5xx_gpu->pfp_bo, gpu->aspace);  in reset_set()

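Across the adreno files one pairing recurs for GPU-internal buffers (ringbuffers, preemption records, crashdumper and GMU memory): msm_gem_kernel_new() allocates a BO, pins it into the given aspace and hands back a kernel mapping plus the GPU iova, and msm_gem_kernel_put() releases it against the same aspace. A hedged sketch of a thin wrapper over those two calls; alloc_gpu_scratch/free_gpu_scratch are hypothetical names, and the void*/ERR_PTR return convention is assumed from the preempt_init_ring() and msm_ringbuffer_new() call sites above:

```c
#include <linux/err.h>
#include "msm_drv.h"
#include "msm_gem.h"	/* MSM_BO_WC, msm_gem_kernel_new()/put() */
#include "msm_gpu.h"	/* struct msm_gpu (gpu->dev, gpu->aspace) */

/* Allocate a write-combined, GPU-mapped scratch buffer (sketch). */
static void *alloc_gpu_scratch(struct msm_gpu *gpu, uint32_t size,
			       struct drm_gem_object **bo, uint64_t *iova)
{
	/* One call allocates the BO, kernel-maps it, and pins it into gpu->aspace. */
	return msm_gem_kernel_new(gpu->dev, size, MSM_BO_WC,
				  gpu->aspace, bo, iova);
}

static void free_gpu_scratch(struct msm_gpu *gpu, struct drm_gem_object *bo)
{
	/* Unpins from gpu->aspace and drops the object, as in a5xx_preempt_fini(). */
	msm_gem_kernel_put(bo, gpu->aspace);
}
```

The a6xx private-address-space path is the exception worth noting: a6xx_create_private_address_space() builds a per-process pagetable on top of gpu->aspace->mmu instead of reusing the global GPU aspace directly.
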
/linux/drivers/staging/vme/devices/

vme_user.h
  14  __u32 aspace; /* Address Space */  (member)
  36  __u32 aspace; /* Address Space */  (member)

/linux/drivers/gpu/drm/msm/disp/dpu1/

dpu_formats.c
  773  struct msm_gem_address_space *aspace,  in _dpu_format_populate_addrs_ubwc() (argument)
  785  if (aspace)  in _dpu_format_populate_addrs_ubwc()
  786  base_addr = msm_framebuffer_iova(fb, aspace, 0);  in _dpu_format_populate_addrs_ubwc()
  864  struct msm_gem_address_space *aspace,  in _dpu_format_populate_addrs_linear() (argument)
  881  if (aspace)  in _dpu_format_populate_addrs_linear()
  883  msm_framebuffer_iova(fb, aspace, i);  in _dpu_format_populate_addrs_linear()
  894  struct msm_gem_address_space *aspace,  in dpu_format_populate_layout() (argument)
  926  ret = _dpu_format_populate_addrs_ubwc(aspace, fb, layout);  in dpu_format_populate_layout()
  928  ret = _dpu_format_populate_addrs_linear(aspace, fb, layout);  in dpu_format_populate_layout()

dpu_kms.c
  901  if (!dpu_kms->base.aspace)  in _dpu_kms_mmu_destroy()
  904  mmu = dpu_kms->base.aspace->mmu;  in _dpu_kms_mmu_destroy()
  907  msm_gem_address_space_put(dpu_kms->base.aspace);  in _dpu_kms_mmu_destroy()
  909  dpu_kms->base.aspace = NULL;  in _dpu_kms_mmu_destroy()
  915  struct msm_gem_address_space *aspace;  in _dpu_kms_mmu_init() (local)
  927  aspace = msm_gem_address_space_create(mmu, "dpu1",  in _dpu_kms_mmu_init()
  930  if (IS_ERR(aspace)) {  in _dpu_kms_mmu_init()
  932  return PTR_ERR(aspace);  in _dpu_kms_mmu_init()
  935  dpu_kms->base.aspace = aspace;  in _dpu_kms_mmu_init()

dpu_formats.h
  62  struct msm_gem_address_space *aspace,

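dpu_format_populate_layout() resolves every plane address through the same aspace, dispatching to the UBWC or linear helper per format, and only looks up a device address when an aspace is present (the if (aspace) checks above). A small sketch of the linear case; the plane_addr array stands in for the driver's layout structure and the helper name is hypothetical:

```c
#include <drm/drm_framebuffer.h>
#include "msm_drv.h"	/* msm_framebuffer_iova() */

/* Sketch: fill per-plane device addresses for a linear framebuffer. */
static void populate_linear_addrs(struct msm_gem_address_space *aspace,
				  struct drm_framebuffer *fb,
				  uint64_t plane_addr[], int num_planes)
{
	int i;

	for (i = 0; i < num_planes; i++) {
		/*
		 * msm_framebuffer_iova() already folds in fb->offsets[i]
		 * (see msm_fb.c:86 above); with no aspace there is no
		 * device mapping to report, so leave the slot at 0 here.
		 */
		plane_addr[i] = aspace ? msm_framebuffer_iova(fb, aspace, i) : 0;
	}
}
```

The UBWC path differs only in which planes it walks and how it derives the metadata offsets; the aspace handling is the same.
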