/linux/drivers/gpu/drm/amd/amdgpu/

amdgpu_ttm.c
   591  if (adev->mman.aper_base_kaddr &&    in amdgpu_ttm_io_mem_reserve()
  1550  adev->mman.fw_vram_usage_va = NULL;    in amdgpu_ttm_fw_reserve_vram_init()
  1630  adev->mman.discovery_tmr_size =    in amdgpu_ttm_reserve_tmr()
  1632  if (!adev->mman.discovery_tmr_size)    in amdgpu_ttm_reserve_tmr()
  1654  adev->mman.discovery_tmr_size,    in amdgpu_ttm_reserve_tmr()
  1656  &adev->mman.discovery_memory,    in amdgpu_ttm_reserve_tmr()
  1694  adev->mman.initialized = true;    in amdgpu_ttm_init()
  1737  if (adev->mman.discovery_bin) {    in amdgpu_ttm_init()
  1827  if (!adev->mman.initialized)    in amdgpu_ttm_fini()
  1847  ttm_device_fini(&adev->mman.bdev);    in amdgpu_ttm_fini()
  [all …]
|
amdgpu_preempt_mgr.c
    50  man = ttm_manager_type(&adev->mman.bdev, AMDGPU_PL_PREEMPT);    in mem_info_preempt_used_show()
   148  struct amdgpu_preempt_mgr *mgr = &adev->mman.preempt_mgr;    in amdgpu_preempt_mgr_init()
   165  ttm_set_driver_manager(&adev->mman.bdev, AMDGPU_PL_PREEMPT,    in amdgpu_preempt_mgr_init()
   181  struct amdgpu_preempt_mgr *mgr = &adev->mman.preempt_mgr;    in amdgpu_preempt_mgr_fini()
   187  ret = ttm_resource_manager_evict_all(&adev->mman.bdev, man);    in amdgpu_preempt_mgr_fini()
   194  ttm_set_driver_manager(&adev->mman.bdev, AMDGPU_PL_PREEMPT, NULL);    in amdgpu_preempt_mgr_fini()
|
amdgpu_discovery.c
   188  adev->mman.discovery_tmr_size, false);    in amdgpu_discovery_read_binary()
   222  adev->mman.discovery_bin = kzalloc(adev->mman.discovery_tmr_size, GFP_KERNEL);    in amdgpu_discovery_init()
   223  if (!adev->mman.discovery_bin)    in amdgpu_discovery_init()
   232  adev->mman.discovery_tmr_size);    in amdgpu_discovery_init()
   296  kfree(adev->mman.discovery_bin);    in amdgpu_discovery_init()
   297  adev->mman.discovery_bin = NULL;    in amdgpu_discovery_init()
   304  kfree(adev->mman.discovery_bin);    in amdgpu_discovery_fini()
   305  adev->mman.discovery_bin = NULL;    in amdgpu_discovery_fini()
   440  if (!adev->mman.discovery_bin) {    in amdgpu_discovery_get_ip_version()
   539  if (!adev->mman.discovery_bin) {    in amdgpu_discovery_get_gfx_info()
  [all …]
|
amdgpu_gtt_mgr.c
    62  man = ttm_manager_type(&adev->mman.bdev, TTM_PL_TT);    in amdgpu_mem_info_gtt_total_show()
    82  man = ttm_manager_type(&adev->mman.bdev, TTM_PL_TT);    in amdgpu_mem_info_gtt_used_show()
   235  adev = container_of(mgr, typeof(*adev), mman.gtt_mgr);    in amdgpu_gtt_mgr_recover()
   287  struct amdgpu_gtt_mgr *mgr = &adev->mman.gtt_mgr;    in amdgpu_gtt_mgr_init()
   302  ttm_set_driver_manager(&adev->mman.bdev, TTM_PL_TT, &mgr->manager);    in amdgpu_gtt_mgr_init()
   317  struct amdgpu_gtt_mgr *mgr = &adev->mman.gtt_mgr;    in amdgpu_gtt_mgr_fini()
   323  ret = ttm_resource_manager_evict_all(&adev->mman.bdev, man);    in amdgpu_gtt_mgr_fini()
   332  ttm_set_driver_manager(&adev->mman.bdev, TTM_PL_TT, NULL);    in amdgpu_gtt_mgr_fini()
|
amdgpu_gmc.c
   688  adev->mman.keep_stolen_vga_memory = true;    in amdgpu_gmc_get_vbios_allocations()
   691  adev->mman.keep_stolen_vga_memory = false;    in amdgpu_gmc_get_vbios_allocations()
   701  if (adev->mman.keep_stolen_vga_memory)    in amdgpu_gmc_get_vbios_allocations()
   710  adev->mman.stolen_vga_size = AMDGPU_VBIOS_VGA_ALLOCATION;    in amdgpu_gmc_get_vbios_allocations()
   711  adev->mman.stolen_extended_size = size - adev->mman.stolen_vga_size;    in amdgpu_gmc_get_vbios_allocations()
   713  adev->mman.stolen_vga_size = size;    in amdgpu_gmc_get_vbios_allocations()
   714  adev->mman.stolen_extended_size = 0;    in amdgpu_gmc_get_vbios_allocations()
   814  adev->mman.stolen_reserved_offset = 0;    in amdgpu_gmc_get_reserved_allocation()
   815  adev->mman.stolen_reserved_size = 0;    in amdgpu_gmc_get_reserved_allocation()
   820  adev->mman.stolen_reserved_offset = 0x1ffb0000;    in amdgpu_gmc_get_reserved_allocation()
  [all …]
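The amdgpu_gmc_get_vbios_allocations() hits above show how the stolen VBIOS carve-out is split into a fixed VGA slice (AMDGPU_VBIOS_VGA_ALLOCATION) plus an "extended" remainder, or handed over entirely to the VGA slice when the carve-out is small. Below is a standalone userspace sketch of that arithmetic only; the 9 MiB value and the comparison used to pick the branch are assumptions for illustration, not taken from the driver.

#include <stdint.h>
#include <stdio.h>

#define VBIOS_VGA_ALLOCATION (9u * 1024 * 1024)   /* assumed value, for illustration only */

struct stolen_split {
        uint64_t vga_size;        /* always-reserved VGA slice */
        uint64_t extended_size;   /* remainder of the carve-out */
};

/* Mirrors the shape of the split seen at lines 710-714 above. */
static struct stolen_split split_stolen(uint64_t size)
{
        struct stolen_split s;

        if (size > VBIOS_VGA_ALLOCATION) {
                /* Large carve-out: fixed VGA slice plus extended remainder. */
                s.vga_size = VBIOS_VGA_ALLOCATION;
                s.extended_size = size - s.vga_size;
        } else {
                /* Small carve-out: everything counts as the VGA slice. */
                s.vga_size = size;
                s.extended_size = 0;
        }
        return s;
}

int main(void)
{
        struct stolen_split s = split_stolen(64u * 1024 * 1024);

        printf("vga=%llu extended=%llu\n",
               (unsigned long long)s.vga_size,
               (unsigned long long)s.extended_size);
        return 0;
}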
|
amdgpu_vram_mgr.c
    48  return container_of(mgr, struct amdgpu_device, mman.vram_mgr);    in to_amdgpu_device()
   101  man = ttm_manager_type(&adev->mman.bdev, TTM_PL_VRAM);    in amdgpu_mem_info_vram_used_show()
   121  man = ttm_manager_type(&adev->mman.bdev, TTM_PL_VRAM);    in amdgpu_mem_info_vis_vram_used_show()
   697  struct amdgpu_vram_mgr *mgr = &adev->mman.vram_mgr;    in amdgpu_vram_mgr_init()
   709  ttm_set_driver_manager(&adev->mman.bdev, TTM_PL_VRAM, &mgr->manager);    in amdgpu_vram_mgr_init()
   724  struct amdgpu_vram_mgr *mgr = &adev->mman.vram_mgr;    in amdgpu_vram_mgr_fini()
   731  ret = ttm_resource_manager_evict_all(&adev->mman.bdev, man);    in amdgpu_vram_mgr_fini()
   747  ttm_set_driver_manager(&adev->mman.bdev, TTM_PL_VRAM, NULL);    in amdgpu_vram_mgr_fini()
|
amdgpu_sdma.h
   116  #define amdgpu_emit_copy_buffer(adev, ib, s, d, b, t) (adev)->mman.buffer_funcs->emit_copy_buffer((…
   117  #define amdgpu_emit_fill_buffer(adev, ib, s, d, b) (adev)->mman.buffer_funcs->emit_fill_buffer((ib)…
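These wrappers dispatch through adev->mman.buffer_funcs, a per-ASIC table of function pointers that each SDMA generation installs (see the sdma_v5_2_set_buffer_funcs() hits further down). The following is a minimal userspace sketch of that macro-over-ops-table pattern; the struct and function names are stand-ins, not the driver's.

#include <stdio.h>

struct ib {
        unsigned int ndw;   /* dwords emitted so far */
};

struct buffer_funcs {
        void (*emit_copy_buffer)(struct ib *ib, unsigned long src,
                                 unsigned long dst, unsigned int bytes);
        void (*emit_fill_buffer)(struct ib *ib, unsigned int data,
                                 unsigned long dst, unsigned int bytes);
};

struct device_ctx {
        const struct buffer_funcs *buffer_funcs;   /* filled in per generation */
};

/* Same shape as the amdgpu_emit_copy_buffer()/..._fill_buffer() wrappers. */
#define emit_copy_buffer(dev, ib, s, d, b) \
        (dev)->buffer_funcs->emit_copy_buffer((ib), (s), (d), (b))
#define emit_fill_buffer(dev, ib, v, d, b) \
        (dev)->buffer_funcs->emit_fill_buffer((ib), (v), (d), (b))

static void gen_copy(struct ib *ib, unsigned long s, unsigned long d,
                     unsigned int b)
{
        printf("copy %u bytes: %#lx -> %#lx\n", b, s, d);
        ib->ndw += 7;   /* pretend packet size */
}

static void gen_fill(struct ib *ib, unsigned int v, unsigned long d,
                     unsigned int b)
{
        printf("fill %u bytes at %#lx with %#x\n", b, d, v);
        ib->ndw += 5;
}

static const struct buffer_funcs gen_funcs = {
        .emit_copy_buffer = gen_copy,
        .emit_fill_buffer = gen_fill,
};

int main(void)
{
        struct device_ctx dev = { .buffer_funcs = &gen_funcs };
        struct ib ib = { 0 };

        emit_copy_buffer(&dev, &ib, 0x1000, 0x2000, 4096);
        emit_fill_buffer(&dev, &ib, 0, 0x3000, 4096);
        printf("ib dwords: %u\n", ib.ndw);
        return 0;
}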
|
gmc_v10_0.c
   317  struct amdgpu_ring *ring = adev->mman.buffer_funcs_ring;    in gmc_v10_0_flush_gpu_tlb()
   345  mutex_lock(&adev->mman.gtt_window_lock);    in gmc_v10_0_flush_gpu_tlb()
   349  mutex_unlock(&adev->mman.gtt_window_lock);    in gmc_v10_0_flush_gpu_tlb()
   355  if (!adev->mman.buffer_funcs_enabled ||    in gmc_v10_0_flush_gpu_tlb()
   360  mutex_unlock(&adev->mman.gtt_window_lock);    in gmc_v10_0_flush_gpu_tlb()
   378  r = amdgpu_job_submit(job, &adev->mman.entity,    in gmc_v10_0_flush_gpu_tlb()
   383  mutex_unlock(&adev->mman.gtt_window_lock);    in gmc_v10_0_flush_gpu_tlb()
   394  mutex_unlock(&adev->mman.gtt_window_lock);    in gmc_v10_0_flush_gpu_tlb()
|
amdgpu_benchmark.c
    41  struct amdgpu_ring *ring = adev->mman.buffer_funcs_ring;    in amdgpu_benchmark_do_move()
   129  if (adev->mman.buffer_funcs) {    in amdgpu_benchmark_move()
|
amdgpu_test.c
    34  struct amdgpu_ring *ring = adev->mman.buffer_funcs_ring;    in amdgpu_do_test_moves()
   248  if (adev->mman.buffer_funcs)    in amdgpu_test_moves()
|
amdgpu_virt.c
   415  retired_page = *(uint64_t *)(adev->mman.fw_vram_usage_va +    in amdgpu_virt_add_bad_page()
   551  struct ttm_resource_manager *vram_man = ttm_manager_type(&adev->mman.bdev, TTM_PL_VRAM);    in amdgpu_virt_write_vf2pf_data()
   628  if (adev->mman.fw_vram_usage_va != NULL) {    in amdgpu_virt_init_data_exchange()
   633  (adev->mman.fw_vram_usage_va + (AMD_SRIOV_MSG_PF2VF_OFFSET_KB << 10));    in amdgpu_virt_init_data_exchange()
   636  (adev->mman.fw_vram_usage_va + (AMD_SRIOV_MSG_VF2PF_OFFSET_KB << 10));    in amdgpu_virt_init_data_exchange()
|
psp_v11_0.c
   688  if (adev->gmc.visible_vram_size < sz || !adev->mman.aper_base_kaddr) {    in psp_v11_0_memory_training()
   691  adev->mman.aper_base_kaddr);    in psp_v11_0_memory_training()
   702  memcpy_fromio(buf, adev->mman.aper_base_kaddr, sz);    in psp_v11_0_memory_training()
   711  memcpy_toio(adev->mman.aper_base_kaddr, buf, sz);    in psp_v11_0_memory_training()
|
sdma_v5_2.c
   481  if ((adev->mman.buffer_funcs_ring == sdma0) ||    in sdma_v5_2_gfx_stop()
   482  (adev->mman.buffer_funcs_ring == sdma1) ||    in sdma_v5_2_gfx_stop()
   483  (adev->mman.buffer_funcs_ring == sdma2) ||    in sdma_v5_2_gfx_stop()
   484  (adev->mman.buffer_funcs_ring == sdma3))    in sdma_v5_2_gfx_stop()
   745  if (adev->mman.buffer_funcs_ring == ring)    in sdma_v5_2_gfx_resume()
  1792  if (adev->mman.buffer_funcs == NULL) {    in sdma_v5_2_set_buffer_funcs()
  1793  adev->mman.buffer_funcs = &sdma_v5_2_buffer_funcs;    in sdma_v5_2_set_buffer_funcs()
  1794  adev->mman.buffer_funcs_ring = &adev->sdma.instance[0].ring;    in sdma_v5_2_set_buffer_funcs()
|
/linux/drivers/gpu/drm/qxl/

qxl_ttm.c
    42  struct qxl_mman *mman;    in qxl_get_qdev() local
    45  mman = container_of(bdev, struct qxl_mman, bdev);    in qxl_get_qdev()
    46  qdev = container_of(mman, struct qxl_device, mman);    in qxl_get_qdev()
   178  return ttm_range_man_init(&qdev->mman.bdev, type, false, size);    in qxl_ttm_init_mem_type()
   187  r = ttm_device_init(&qdev->mman.bdev, &qxl_bo_driver, NULL,    in qxl_ttm_init()
   219  ttm_range_man_fini(&qdev->mman.bdev, TTM_PL_VRAM);    in qxl_ttm_fini()
   220  ttm_range_man_fini(&qdev->mman.bdev, TTM_PL_PRIV);    in qxl_ttm_fini()
   221  ttm_device_fini(&qdev->mman.bdev);    in qxl_ttm_fini()
   255  qxl_mem_types_list[i].data = ttm_manager_type(&qdev->mman.bdev, TTM_PL_VRAM);    in qxl_ttm_debugfs_init()
   257  qxl_mem_types_list[i].data = ttm_manager_type(&qdev->mman.bdev, TTM_PL_PRIV);    in qxl_ttm_debugfs_init()
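qxl_get_qdev() above (and radeon_get_rdev() further down) recovers the driver-private device from an embedded TTM device by applying container_of() twice: bdev -> mman wrapper -> device. A self-contained userspace sketch of that double container_of() step, using stand-in struct names rather than the real qxl/radeon types:

#include <stddef.h>
#include <stdio.h>

/* Minimal container_of(), equivalent in effect to the kernel macro. */
#define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

/* Stand-ins for ttm_device / qxl_mman / qxl_device. */
struct ttm_dev { int dummy; };

struct mman_wrap {
        struct ttm_dev bdev;      /* embedded, not a pointer */
};

struct device_priv {
        int id;
        struct mman_wrap mman;
};

/* Same shape as qxl_get_qdev(): bdev -> mman -> device. */
static struct device_priv *get_priv(struct ttm_dev *bdev)
{
        struct mman_wrap *mman = container_of(bdev, struct mman_wrap, bdev);

        return container_of(mman, struct device_priv, mman);
}

int main(void)
{
        struct device_priv dev = { .id = 42 };

        /* TTM callbacks only see &dev.mman.bdev; recover dev from it. */
        printf("id = %d\n", get_priv(&dev.mman.bdev)->id);
        return 0;
}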
|
qxl_object.c
   144  r = ttm_bo_init_reserved(&qdev->mman.bdev, &bo->tbo, size, type,    in qxl_bo_create()
   404  man = ttm_manager_type(&qdev->mman.bdev, TTM_PL_PRIV);    in qxl_surf_evict()
   405  return ttm_resource_manager_evict_all(&qdev->mman.bdev, man);    in qxl_surf_evict()
   412  man = ttm_manager_type(&qdev->mman.bdev, TTM_PL_VRAM);    in qxl_vram_evict()
   413  return ttm_resource_manager_evict_all(&qdev->mman.bdev, man);    in qxl_vram_evict()
|
/linux/drivers/gpu/drm/radeon/

radeon_ttm.c
    62  struct radeon_mman *mman;    in radeon_get_rdev() local
    65  mman = container_of(bdev, struct radeon_mman, bdev);    in radeon_get_rdev()
    66  rdev = container_of(mman, struct radeon_device, mman);    in radeon_get_rdev()
    78  return ttm_range_man_init(&rdev->mman.bdev, TTM_PL_TT,    in radeon_ttm_init_gtt()
   585  return ttm_pool_free(&rdev->mman.bdev.pool, ttm);    in radeon_ttm_tt_unpopulate()
   718  rdev->mman.initialized = true;    in radeon_ttm_init()
   763  if (!rdev->mman.initialized)    in radeon_ttm_fini()
   775  ttm_range_man_fini(&rdev->mman.bdev, TTM_PL_TT);    in radeon_ttm_fini()
   776  ttm_device_fini(&rdev->mman.bdev);    in radeon_ttm_fini()
   778  rdev->mman.initialized = false;    in radeon_ttm_fini()
  [all …]
|
/linux/tools/perf/trace/beauty/

mmap_flags.sh
    15  linux_mman=${linux_header_dir}/mman.h
    16  arch_mman=${arch_header_dir}/mman.h
    32  (egrep $regex ${header_dir}/mman-common.h | \
    37  (egrep $regex ${header_dir}/mman.h | \
|
mmap_prot.sh
    13  common_mman=${asm_header_dir}/mman-common.h
    14  arch_mman=${arch_header_dir}/mman.h
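mmap_flags.sh and mmap_prot.sh scrape mman.h / mman-common.h with egrep to generate string tables that perf trace uses to pretty-print the prot and flags arguments of mmap(). A rough userspace approximation of what those generated tables enable, hand-written here rather than generated and covering only a few common bits:

#define _DEFAULT_SOURCE
#include <stdio.h>
#include <sys/mman.h>

/* Hand-written subset of the tables perf generates from the headers. */
static void show_prot(int prot)
{
        printf("prot:");
        if (prot == PROT_NONE)
                printf(" NONE");
        if (prot & PROT_READ)
                printf(" READ");
        if (prot & PROT_WRITE)
                printf(" WRITE");
        if (prot & PROT_EXEC)
                printf(" EXEC");
        printf("\n");
}

static void show_flags(int flags)
{
        printf("flags:");
        if (flags & MAP_SHARED)
                printf(" SHARED");
        if (flags & MAP_PRIVATE)
                printf(" PRIVATE");
        if (flags & MAP_FIXED)
                printf(" FIXED");
        if (flags & MAP_ANONYMOUS)
                printf(" ANONYMOUS");
        printf("\n");
}

int main(void)
{
        show_prot(PROT_READ | PROT_WRITE);
        show_flags(MAP_PRIVATE | MAP_ANONYMOUS);
        return 0;
}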
|
madvise_behavior.sh
     8  egrep $regex ${header_dir}/mman-common.h | \
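madvise_behavior.sh likewise pulls the MADV_* names out of mman-common.h so perf trace can decode madvise() calls. A small runnable example of the call being decoded; for a private anonymous mapping, MADV_DONTNEED drops the pages, so the next read sees zero-filled memory:

#define _DEFAULT_SOURCE
#include <stdio.h>
#include <string.h>
#include <sys/mman.h>
#include <unistd.h>

int main(void)
{
        size_t len = (size_t)sysconf(_SC_PAGESIZE);
        char *p = mmap(NULL, len, PROT_READ | PROT_WRITE,
                       MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);

        if (p == MAP_FAILED)
                return 1;

        memset(p, 0xaa, len);
        printf("before madvise: %#x\n", p[0] & 0xff);

        /* Tell the kernel we no longer need these pages. */
        if (madvise(p, len, MADV_DONTNEED))
                return 1;

        /* Anonymous private pages read back as zero after MADV_DONTNEED. */
        printf("after madvise:  %#x\n", p[0] & 0xff);

        munmap(p, len);
        return 0;
}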
|
pkey_alloc_access_rights.sh
     8  egrep $regex ${header_dir}/mman-common.h | \
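pkey_alloc_access_rights.sh extracts the PKEY_DISABLE_* bits from mman-common.h for the same purpose. A hand-rolled decoder for those two bits; the values are defined locally so the sketch builds even where libc does not expose them:

#include <stdio.h>

/* Values as found in asm-generic/mman-common.h. */
#ifndef PKEY_DISABLE_ACCESS
#define PKEY_DISABLE_ACCESS 0x1
#endif
#ifndef PKEY_DISABLE_WRITE
#define PKEY_DISABLE_WRITE  0x2
#endif

static void show_pkey_rights(unsigned int rights)
{
        printf("access_rights:");
        if (!rights)
                printf(" (none restricted)");
        if (rights & PKEY_DISABLE_ACCESS)
                printf(" DISABLE_ACCESS");
        if (rights & PKEY_DISABLE_WRITE)
                printf(" DISABLE_WRITE");
        printf("\n");
}

int main(void)
{
        show_pkey_rights(0);
        show_pkey_rights(PKEY_DISABLE_WRITE);
        return 0;
}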
|
mremap_flags.sh
    10  linux_mman=${linux_header_dir}/mman.h
|
/linux/tools/perf/

check-headers.sh
   149  check include/uapi/asm-generic/mman.h '-I "^#include <\(uapi/\)*asm-generic/mman-common\(-tools\)*.…
   150  check include/uapi/linux/mman.h '-I "^#include <\(uapi/\)*asm/mman.h>"'
|
Makefile.perf
   488  $(pkey_alloc_access_rights_array): $(asm_generic_hdr_dir)/mman-common.h $(pkey_alloc_access_rights_…
   549  $(madvise_behavior_array): $(madvise_hdr_dir)/mman-common.h $(madvise_behavior_tbl)
   555  $(mmap_flags_array): $(linux_uapi_dir)/mman.h $(asm_generic_uapi_dir)/mman.h $(asm_generic_uapi_dir…
   561  $(mremap_flags_array): $(linux_uapi_dir)/mman.h $(mremap_flags_tbl)
   580  $(mmap_prot_array): $(asm_generic_uapi_dir)/mman.h $(asm_generic_uapi_dir)/mman-common.h $(mmap_pro…
|
/linux/drivers/gpu/drm/amd/amdkfd/

kfd_migrate.c
    63  num_dw = ALIGN(adev->mman.buffer_funcs->copy_num_dw, 8);    in svm_migrate_gart_map()
    93  r = amdgpu_job_submit(job, &adev->mman.entity,    in svm_migrate_gart_map()
   137  struct amdgpu_ring *ring = adev->mman.buffer_funcs_ring;    in svm_migrate_copy_memory_gart()
   143  mutex_lock(&adev->mman.gtt_window_lock);    in svm_migrate_copy_memory_gart()
   179  mutex_unlock(&adev->mman.gtt_window_lock);    in svm_migrate_copy_memory_gart()
|
/linux/drivers/gpu/drm/i915/selftests/

i915_live_selftests.h
    32  selftest(mman, i915_gem_mman_live_selftests)
|