Lines Matching refs:bo_va
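All matches below come from the amdgpu GPU virtual-memory manager (drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c, judging by the enclosing function names). Each entry gives the source line number, the matching line, the enclosing function, and whether bo_va is a local or an argument there. After each function's matches, a short sketch reconstructs the surrounding control flow from the fragments; anything not visible in the listing is marked as elided or assumed.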

1824 struct amdgpu_bo_va *bo_va, *tmp; in amdgpu_vm_get_memory() local
1826 list_for_each_entry_safe(bo_va, tmp, &vm->idle, base.vm_status) { in amdgpu_vm_get_memory()
1827 if (!bo_va->base.bo) in amdgpu_vm_get_memory()
1829 amdgpu_bo_get_memory(bo_va->base.bo, vram_mem, in amdgpu_vm_get_memory()
1832 list_for_each_entry_safe(bo_va, tmp, &vm->evicted, base.vm_status) { in amdgpu_vm_get_memory()
1833 if (!bo_va->base.bo) in amdgpu_vm_get_memory()
1835 amdgpu_bo_get_memory(bo_va->base.bo, vram_mem, in amdgpu_vm_get_memory()
1838 list_for_each_entry_safe(bo_va, tmp, &vm->relocated, base.vm_status) { in amdgpu_vm_get_memory()
1839 if (!bo_va->base.bo) in amdgpu_vm_get_memory()
1841 amdgpu_bo_get_memory(bo_va->base.bo, vram_mem, in amdgpu_vm_get_memory()
1844 list_for_each_entry_safe(bo_va, tmp, &vm->moved, base.vm_status) { in amdgpu_vm_get_memory()
1845 if (!bo_va->base.bo) in amdgpu_vm_get_memory()
1847 amdgpu_bo_get_memory(bo_va->base.bo, vram_mem, in amdgpu_vm_get_memory()
1851 list_for_each_entry_safe(bo_va, tmp, &vm->invalidated, base.vm_status) { in amdgpu_vm_get_memory()
1852 if (!bo_va->base.bo) in amdgpu_vm_get_memory()
1854 amdgpu_bo_get_memory(bo_va->base.bo, vram_mem, in amdgpu_vm_get_memory()
1857 list_for_each_entry_safe(bo_va, tmp, &vm->done, base.vm_status) { in amdgpu_vm_get_memory()
1858 if (!bo_va->base.bo) in amdgpu_vm_get_memory()
1860 amdgpu_bo_get_memory(bo_va->base.bo, vram_mem, in amdgpu_vm_get_memory()
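Every match in amdgpu_vm_get_memory() is the same three-line pattern applied to each of the six per-VM status lists. A condensed sketch (the trailing arguments of amdgpu_bo_get_memory() are wrapped out of the listing, so they stay elided here):

    struct amdgpu_bo_va *bo_va, *tmp;

    /* one pass per status list: idle, evicted, relocated, moved,
     * invalidated, done -- only the list head differs */
    list_for_each_entry_safe(bo_va, tmp, &vm->idle, base.vm_status) {
            if (!bo_va->base.bo)
                    continue;       /* no backing BO, nothing to account */
            amdgpu_bo_get_memory(bo_va->base.bo, vram_mem
                                 /* remaining args elided in the listing */);
    }

The _safe variant is used even though nothing is deleted during the walk, presumably to tolerate entries changing state while the lists are traversed.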
1878 int amdgpu_vm_bo_update(struct amdgpu_device *adev, struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_update() argument
1881 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_update()
1882 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_update()
1899 if (obj->import_attach && bo_va->is_xgmi) { in amdgpu_vm_bo_update()
1928 last_update = &bo_va->last_pt_update; in amdgpu_vm_bo_update()
1930 if (!clear && bo_va->base.moved) { in amdgpu_vm_bo_update()
1931 bo_va->base.moved = false; in amdgpu_vm_bo_update()
1932 list_splice_init(&bo_va->valids, &bo_va->invalids); in amdgpu_vm_bo_update()
1934 } else if (bo_va->cleared != clear) { in amdgpu_vm_bo_update()
1935 list_splice_init(&bo_va->valids, &bo_va->invalids); in amdgpu_vm_bo_update()
1938 list_for_each_entry(mapping, &bo_va->invalids, list) { in amdgpu_vm_bo_update()
1972 amdgpu_vm_bo_evicted(&bo_va->base); in amdgpu_vm_bo_update()
1974 amdgpu_vm_bo_idle(&bo_va->base); in amdgpu_vm_bo_update()
1976 amdgpu_vm_bo_done(&bo_va->base); in amdgpu_vm_bo_update()
1979 list_splice_init(&bo_va->invalids, &bo_va->valids); in amdgpu_vm_bo_update()
1980 bo_va->cleared = clear; in amdgpu_vm_bo_update()
1983 list_for_each_entry(mapping, &bo_va->valids, list) in amdgpu_vm_bo_update()
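The update path is a small state machine over the bo_va's two mapping lists. If the BO moved since the last update (and this is not a clearing pass), or if the clear state changed, every previously valid mapping is demoted by splicing valids onto invalids; everything on invalids is then (re)written into the page tables; on success the whole invalids list is promoted back and the clear state is recorded. A hedged sketch, with the actual PTE writes, the imported-dma-buf/XGMI special case at line 1899, and the fence bookkeeping via last_pt_update (line 1928) elided:

    if (!clear && bo_va->base.moved) {
            bo_va->base.moved = false;
            list_splice_init(&bo_va->valids, &bo_va->invalids);
    } else if (bo_va->cleared != clear) {
            list_splice_init(&bo_va->valids, &bo_va->invalids);
    }

    list_for_each_entry(mapping, &bo_va->invalids, list) {
            /* write (or clear) the PTEs for this mapping -- elided */
    }

    /* success: every mapping is valid again */
    list_splice_init(&bo_va->invalids, &bo_va->valids);
    bo_va->cleared = clear;

Depending on where the BO now resides, the bo_va itself is then re-queued via amdgpu_vm_bo_evicted(), amdgpu_vm_bo_idle() or amdgpu_vm_bo_done() (lines 1972-1976).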
2203 struct amdgpu_bo_va *bo_va, *tmp; in amdgpu_vm_handle_moved() local
2208 list_for_each_entry_safe(bo_va, tmp, &vm->moved, base.vm_status) { in amdgpu_vm_handle_moved()
2210 r = amdgpu_vm_bo_update(adev, bo_va, false, NULL); in amdgpu_vm_handle_moved()
2217 bo_va = list_first_entry(&vm->invalidated, struct amdgpu_bo_va, in amdgpu_vm_handle_moved()
2219 resv = bo_va->base.bo->tbo.base.resv; in amdgpu_vm_handle_moved()
2229 r = amdgpu_vm_bo_update(adev, bo_va, clear, NULL); in amdgpu_vm_handle_moved()
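amdgpu_vm_handle_moved() drains two lists. Entries on vm->moved share the VM's reservation and can be updated directly with clear = false; entries on vm->invalidated are popped one at a time, and whether their PTEs are rewritten or merely cleared depends on the per-BO reservation fetched at line 2219 (the exact test is outside this listing). A condensed sketch, locking elided:

    list_for_each_entry_safe(bo_va, tmp, &vm->moved, base.vm_status) {
            r = amdgpu_vm_bo_update(adev, bo_va, false, NULL);
            if (r)
                    return r;
    }

    while (!list_empty(&vm->invalidated)) {
            bo_va = list_first_entry(&vm->invalidated, struct amdgpu_bo_va,
                                     base.vm_status);
            resv = bo_va->base.bo->tbo.base.resv;
            /* clear is chosen from whether resv can be taken -- elided */
            r = amdgpu_vm_bo_update(adev, bo_va, clear, NULL);
            if (r)
                    return r;
    }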
2261 struct amdgpu_bo_va *bo_va; in amdgpu_vm_bo_add() local
2263 bo_va = kzalloc(sizeof(struct amdgpu_bo_va), GFP_KERNEL); in amdgpu_vm_bo_add()
2264 if (bo_va == NULL) { in amdgpu_vm_bo_add()
2267 amdgpu_vm_bo_base_init(&bo_va->base, vm, bo); in amdgpu_vm_bo_add()
2269 bo_va->ref_count = 1; in amdgpu_vm_bo_add()
2270 INIT_LIST_HEAD(&bo_va->valids); in amdgpu_vm_bo_add()
2271 INIT_LIST_HEAD(&bo_va->invalids); in amdgpu_vm_bo_add()
2274 return bo_va; in amdgpu_vm_bo_add()
2277 bo_va->is_xgmi = true; in amdgpu_vm_bo_add()
2282 return bo_va; in amdgpu_vm_bo_add()
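Construction in amdgpu_vm_bo_add() is a single kzalloc plus initialization: the base is bound to the (vm, bo) pair, the reference count starts at one, and both mapping lists start empty. The early return at line 2274 is assumed to be the NULL-bo case; only a real BO can reach the XGMI path that sets is_xgmi at line 2277. Reassembled, with the failure value and the XGMI test left as assumptions:

    struct amdgpu_bo_va *bo_va;

    bo_va = kzalloc(sizeof(struct amdgpu_bo_va), GFP_KERNEL);
    if (bo_va == NULL)
            return NULL;                    /* assumed failure value */
    amdgpu_vm_bo_base_init(&bo_va->base, vm, bo);

    bo_va->ref_count = 1;
    INIT_LIST_HEAD(&bo_va->valids);
    INIT_LIST_HEAD(&bo_va->invalids);

    if (!bo)
            return bo_va;                   /* assumed guard at line 2274 */

    /* when the BO is accessible over an XGMI link (test elided):
     *         bo_va->is_xgmi = true;
     */

    return bo_va;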
2296 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_insert_map() argument
2299 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_insert_map()
2300 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_insert_map()
2302 mapping->bo_va = bo_va; in amdgpu_vm_bo_insert_map()
2303 list_add(&mapping->list, &bo_va->invalids); in amdgpu_vm_bo_insert_map()
2310 !bo_va->base.moved) { in amdgpu_vm_bo_insert_map()
2311 list_move(&bo_va->base.vm_status, &vm->moved); in amdgpu_vm_bo_insert_map()
2313 trace_amdgpu_vm_bo_map(bo_va, mapping); in amdgpu_vm_bo_insert_map()
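A new mapping always enters through the invalids list: its back-pointer is set and it is queued at line 2303, and the owning bo_va is moved onto vm->moved if the BO exists, satisfies the multi-line condition ending at line 2310 (wrapped out of the listing; it appears to test whether the BO shares the VM's reservation), and was not already queued. Sketch:

    mapping->bo_va = bo_va;
    list_add(&mapping->list, &bo_va->invalids);
    /* interval-tree insertion of the mapping elided */

    if (bo /* && condition elided at line 2310 */ &&
        !bo_va->base.moved)
            list_move(&bo_va->base.vm_status, &vm->moved);

    trace_amdgpu_vm_bo_map(bo_va, mapping);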
2334 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_map() argument
2339 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_map()
2340 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_map()
2376 amdgpu_vm_bo_insert_map(adev, bo_va, mapping); in amdgpu_vm_bo_map()
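Only three lines of amdgpu_vm_bo_map() touch bo_va directly: the two locals at lines 2339-2340 and the final hand-off at line 2376. Everything in between validates the request and builds the mapping. A skeleton under those assumptions (allocator and error values assumed, not shown in the listing):

    struct amdgpu_bo *bo = bo_va->base.bo;
    struct amdgpu_vm *vm = bo_va->base.vm;
    struct amdgpu_bo_va_mapping *mapping;

    /* validate saddr/size against bo and the VM's existing mappings --
     * elided; any conflict bails out with an error */

    mapping = kzalloc(sizeof(*mapping), GFP_KERNEL);
    if (!mapping)
            return -ENOMEM;
    /* fill mapping->start/last/offset/flags -- elided */

    amdgpu_vm_bo_insert_map(adev, bo_va, mapping);
    return 0;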
2400 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_replace_map() argument
2405 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_replace_map()
2426 r = amdgpu_vm_bo_clear_mappings(adev, bo_va->base.vm, saddr, size); in amdgpu_vm_bo_replace_map()
2440 amdgpu_vm_bo_insert_map(adev, bo_va, mapping); in amdgpu_vm_bo_replace_map()
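The replace variant differs in exactly one visible step: before inserting, line 2426 clears whatever already overlaps the target range, so the call behaves as "map, replacing any previous mappings" rather than failing on conflict:

    r = amdgpu_vm_bo_clear_mappings(adev, bo_va->base.vm, saddr, size);
    if (r) {
            /* free the prepared mapping -- elided */
            return r;
    }

    amdgpu_vm_bo_insert_map(adev, bo_va, mapping);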
2460 struct amdgpu_bo_va *bo_va, in amdgpu_vm_bo_unmap() argument
2464 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_unmap()
2469 list_for_each_entry(mapping, &bo_va->valids, list) { in amdgpu_vm_bo_unmap()
2474 if (&mapping->list == &bo_va->valids) { in amdgpu_vm_bo_unmap()
2477 list_for_each_entry(mapping, &bo_va->invalids, list) { in amdgpu_vm_bo_unmap()
2482 if (&mapping->list == &bo_va->invalids) in amdgpu_vm_bo_unmap()
2488 mapping->bo_va = NULL; in amdgpu_vm_bo_unmap()
2489 trace_amdgpu_vm_bo_unmap(bo_va, mapping); in amdgpu_vm_bo_unmap()
2495 bo_va->last_pt_update); in amdgpu_vm_bo_unmap()
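Unmap resolves the address in two passes, exploiting the list_for_each_entry() termination idiom: when the loop runs off the end, the cursor's list member equals the list head itself, which is exactly the tests at lines 2474 and 2482. Valids are searched first; a miss there falls back to invalids; a miss on both means the address was never mapped. Sketch (error code assumed):

    bool valid = true;

    list_for_each_entry(mapping, &bo_va->valids, list) {
            if (mapping->start == saddr)
                    break;
    }
    if (&mapping->list == &bo_va->valids) {         /* not among valids */
            valid = false;
            list_for_each_entry(mapping, &bo_va->invalids, list) {
                    if (mapping->start == saddr)
                            break;
            }
            if (&mapping->list == &bo_va->invalids) /* not mapped at all */
                    return -ENOENT;
    }

    list_del(&mapping->list);       /* interval-tree removal elided */
    mapping->bo_va = NULL;
    trace_amdgpu_vm_bo_unmap(bo_va, mapping);
    /* a previously valid mapping has its PTEs cleared, fenced against
     * bo_va->last_pt_update (line 2495) -- elided */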
2547 before->bo_va = tmp->bo_va; in amdgpu_vm_bo_clear_mappings()
2548 list_add(&before->list, &tmp->bo_va->invalids); in amdgpu_vm_bo_clear_mappings()
2558 after->bo_va = tmp->bo_va; in amdgpu_vm_bo_clear_mappings()
2559 list_add(&after->list, &tmp->bo_va->invalids); in amdgpu_vm_bo_clear_mappings()
2578 tmp->bo_va = NULL; in amdgpu_vm_bo_clear_mappings()
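Clearing an arbitrary range may bisect an existing mapping, which is why amdgpu_vm_bo_clear_mappings() carries preallocated before/after remainders. Each surviving piece inherits the victim's bo_va back-pointer and is queued on that bo_va's invalids list (it needs its PTEs rewritten), while the removed piece is orphaned at line 2578. A sketch of the split, with the range tests assumed from context:

    if (tmp->start < saddr) {               /* a piece survives in front */
            before->bo_va = tmp->bo_va;
            list_add(&before->list, &tmp->bo_va->invalids);
    }
    if (tmp->last > eaddr) {                /* a piece survives behind */
            after->bo_va = tmp->bo_va;
            list_add(&after->list, &tmp->bo_va->invalids);
    }

    /* the clipped middle no longer belongs to any bo_va */
    tmp->bo_va = NULL;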
2639 if (mapping->bo_va && mapping->bo_va->base.bo) { in amdgpu_vm_bo_trace_cs()
2642 bo = mapping->bo_va->base.bo; in amdgpu_vm_bo_trace_cs()
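The double NULL check at line 2639 is the payoff of the orphaning above: a mapping can outlive its bo_va (tmp->bo_va = NULL), and a bo_va can outlive its BO (the !bo_va->base.bo guards elsewhere in this listing), so the tracer only follows the chain when both links are intact:

    if (mapping->bo_va && mapping->bo_va->base.bo) {
            struct amdgpu_bo *bo = mapping->bo_va->base.bo;
            /* emit the trace entry for bo -- elided */
    }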
2663 struct amdgpu_bo_va *bo_va) in amdgpu_vm_bo_rmv() argument
2666 struct amdgpu_bo *bo = bo_va->base.bo; in amdgpu_vm_bo_rmv()
2667 struct amdgpu_vm *vm = bo_va->base.vm; in amdgpu_vm_bo_rmv()
2674 for (base = &bo_va->base.bo->vm_bo; *base; in amdgpu_vm_bo_rmv()
2676 if (*base != &bo_va->base) in amdgpu_vm_bo_rmv()
2679 *base = bo_va->base.next; in amdgpu_vm_bo_rmv()
2685 list_del(&bo_va->base.vm_status); in amdgpu_vm_bo_rmv()
2688 list_for_each_entry_safe(mapping, next, &bo_va->valids, list) { in amdgpu_vm_bo_rmv()
2691 mapping->bo_va = NULL; in amdgpu_vm_bo_rmv()
2692 trace_amdgpu_vm_bo_unmap(bo_va, mapping); in amdgpu_vm_bo_rmv()
2695 list_for_each_entry_safe(mapping, next, &bo_va->invalids, list) { in amdgpu_vm_bo_rmv()
2699 bo_va->last_pt_update); in amdgpu_vm_bo_rmv()
2702 dma_fence_put(bo_va->last_pt_update); in amdgpu_vm_bo_rmv()
2704 if (bo && bo_va->is_xgmi) in amdgpu_vm_bo_rmv()
2707 kfree(bo_va); in amdgpu_vm_bo_rmv()
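Teardown in amdgpu_vm_bo_rmv() is the mirror of amdgpu_vm_bo_add() plus unlinking. The loop at lines 2674-2679 is the classic pointer-to-pointer walk over the BO's singly linked vm_bo chain: advance a struct amdgpu_vm_bo_base ** until it points at our base, then overwrite it with our next pointer. After that, every mapping on both lists is released, the fence reference is dropped, any XGMI reference taken at add time is released (line 2704, elided below), and the structure is freed. Reassembled:

    struct amdgpu_vm_bo_base **base;

    for (base = &bo_va->base.bo->vm_bo; *base;
         base = &(*base)->next) {
            if (*base != &bo_va->base)
                    continue;
            *base = bo_va->base.next;       /* splice ourselves out */
            break;
    }

    list_del(&bo_va->base.vm_status);       /* leave the status list */

    list_for_each_entry_safe(mapping, next, &bo_va->valids, list) {
            list_del(&mapping->list);       /* interval-tree removal elided */
            mapping->bo_va = NULL;
            trace_amdgpu_vm_bo_unmap(bo_va, mapping);
            /* PTE clearing is queued for live mappings -- elided */
    }
    list_for_each_entry_safe(mapping, next, &bo_va->invalids, list) {
            list_del(&mapping->list);
            /* freed against bo_va->last_pt_update (line 2699) -- elided */
    }

    dma_fence_put(bo_va->last_pt_update);
    kfree(bo_va);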
3466 struct amdgpu_bo_va *bo_va, *tmp; in amdgpu_debugfs_vm_bo_info() local
3482 list_for_each_entry_safe(bo_va, tmp, &vm->idle, base.vm_status) { in amdgpu_debugfs_vm_bo_info()
3483 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
3485 total_idle += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
3491 list_for_each_entry_safe(bo_va, tmp, &vm->evicted, base.vm_status) { in amdgpu_debugfs_vm_bo_info()
3492 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
3494 total_evicted += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
3500 list_for_each_entry_safe(bo_va, tmp, &vm->relocated, base.vm_status) { in amdgpu_debugfs_vm_bo_info()
3501 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
3503 total_relocated += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
3509 list_for_each_entry_safe(bo_va, tmp, &vm->moved, base.vm_status) { in amdgpu_debugfs_vm_bo_info()
3510 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
3512 total_moved += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
3519 list_for_each_entry_safe(bo_va, tmp, &vm->invalidated, base.vm_status) { in amdgpu_debugfs_vm_bo_info()
3520 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
3522 total_invalidated += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
3528 list_for_each_entry_safe(bo_va, tmp, &vm->done, base.vm_status) { in amdgpu_debugfs_vm_bo_info()
3529 if (!bo_va->base.bo) in amdgpu_debugfs_vm_bo_info()
3531 total_done += amdgpu_bo_print_info(id++, bo_va->base.bo, m); in amdgpu_debugfs_vm_bo_info()
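The debugfs dump in amdgpu_debugfs_vm_bo_info() reuses the iteration pattern from amdgpu_vm_get_memory() verbatim: one safe walk per status list, skipping bo_va entries whose BO is gone, but printing each survivor and summing the returned sizes into a per-state total. One of the six blocks, reassembled (declaration types assumed):

    unsigned int id = 0;
    u64 total_idle = 0;

    list_for_each_entry_safe(bo_va, tmp, &vm->idle, base.vm_status) {
            if (!bo_va->base.bo)
                    continue;
            total_idle += amdgpu_bo_print_info(id++, bo_va->base.bo, m);
    }
    /* the same block repeats for evicted, relocated, moved,
     * invalidated and done, each with its own running total */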