Lines matching refs:obj in drivers/gpu/drm/i915/gem/i915_gem_object.c (format: file line number, matching source line, enclosing function)
43 struct drm_i915_gem_object *obj; in i915_gem_object_alloc() local
45 obj = kmem_cache_zalloc(slab_objects, GFP_KERNEL); in i915_gem_object_alloc()
46 if (!obj) in i915_gem_object_alloc()
48 obj->base.funcs = &i915_gem_object_funcs; in i915_gem_object_alloc()
50 return obj; in i915_gem_object_alloc()
53 void i915_gem_object_free(struct drm_i915_gem_object *obj) in i915_gem_object_free() argument
55 return kmem_cache_free(slab_objects, obj); in i915_gem_object_free()
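The alloc/free pair above is the standard dedicated-slab pattern: one kmem_cache per object type gives packed, typed allocations and cheap frees. A minimal sketch of the same pattern, using a hypothetical my_obj type and my_obj_cache in place of the driver's slab_objects:

#include <linux/slab.h>

struct my_obj {
	unsigned int flags;	/* stand-in payload */
};

static struct kmem_cache *my_obj_cache;	/* hypothetical, plays the role of slab_objects */

static int my_cache_init(void)
{
	/* KMEM_CACHE() derives the cache name, size and alignment from the type. */
	my_obj_cache = KMEM_CACHE(my_obj, 0);
	return my_obj_cache ? 0 : -ENOMEM;
}

static struct my_obj *my_obj_alloc(void)
{
	/* zalloc hands back zeroed memory, as i915_gem_object_alloc() relies on. */
	return kmem_cache_zalloc(my_obj_cache, GFP_KERNEL);
}

static void my_obj_free(struct my_obj *obj)
{
	kmem_cache_free(my_obj_cache, obj);
}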
58 void i915_gem_object_init(struct drm_i915_gem_object *obj, in i915_gem_object_init() argument
66 BUILD_BUG_ON(offsetof(typeof(*obj), base) != in i915_gem_object_init()
67 offsetof(typeof(*obj), __do_not_access.base)); in i915_gem_object_init()
69 spin_lock_init(&obj->vma.lock); in i915_gem_object_init()
70 INIT_LIST_HEAD(&obj->vma.list); in i915_gem_object_init()
72 INIT_LIST_HEAD(&obj->mm.link); in i915_gem_object_init()
74 INIT_LIST_HEAD(&obj->lut_list); in i915_gem_object_init()
75 spin_lock_init(&obj->lut_lock); in i915_gem_object_init()
77 spin_lock_init(&obj->mmo.lock); in i915_gem_object_init()
78 obj->mmo.offsets = RB_ROOT; in i915_gem_object_init()
80 init_rcu_head(&obj->rcu); in i915_gem_object_init()
82 obj->ops = ops; in i915_gem_object_init()
84 obj->flags = flags; in i915_gem_object_init()
86 obj->mm.madv = I915_MADV_WILLNEED; in i915_gem_object_init()
87 INIT_RADIX_TREE(&obj->mm.get_page.radix, GFP_KERNEL | __GFP_NOWARN); in i915_gem_object_init()
88 mutex_init(&obj->mm.get_page.lock); in i915_gem_object_init()
89 INIT_RADIX_TREE(&obj->mm.get_dma_page.radix, GFP_KERNEL | __GFP_NOWARN); in i915_gem_object_init()
90 mutex_init(&obj->mm.get_dma_page.lock); in i915_gem_object_init()
102 void __i915_gem_object_fini(struct drm_i915_gem_object *obj) in __i915_gem_object_fini() argument
104 mutex_destroy(&obj->mm.get_page.lock); in __i915_gem_object_fini()
105 mutex_destroy(&obj->mm.get_dma_page.lock); in __i915_gem_object_fini()
106 dma_resv_fini(&obj->base._resv); in __i915_gem_object_fini()
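i915_gem_object_init() and __i915_gem_object_fini() pair every initialised lock and lookup tree with a matching teardown. A sketch of that pairing for one of the page-lookup caches, assuming a hypothetical my_page_cache mirroring obj->mm.get_page:

#include <linux/mutex.h>
#include <linux/radix-tree.h>

struct my_page_cache {			/* hypothetical mirror of obj->mm.get_page */
	struct radix_tree_root radix;
	struct mutex lock;
};

static void my_page_cache_init(struct my_page_cache *c)
{
	/* __GFP_NOWARN: node allocation failure is handled by the lookup
	 * path, so no warning splat is wanted when it happens. */
	INIT_RADIX_TREE(&c->radix, GFP_KERNEL | __GFP_NOWARN);
	mutex_init(&c->lock);
}

static void my_page_cache_fini(struct my_page_cache *c)
{
	/* mutex_destroy() is a no-op unless CONFIG_DEBUG_MUTEXES is set,
	 * where it marks the mutex unusable to catch late lockers. */
	mutex_destroy(&c->lock);
}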
114 void i915_gem_object_set_cache_coherency(struct drm_i915_gem_object *obj, in i915_gem_object_set_cache_coherency() argument
117 obj->cache_level = cache_level; in i915_gem_object_set_cache_coherency()
120 obj->cache_coherent = (I915_BO_CACHE_COHERENT_FOR_READ | in i915_gem_object_set_cache_coherency()
122 else if (HAS_LLC(to_i915(obj->base.dev))) in i915_gem_object_set_cache_coherency()
123 obj->cache_coherent = I915_BO_CACHE_COHERENT_FOR_READ; in i915_gem_object_set_cache_coherency()
125 obj->cache_coherent = 0; in i915_gem_object_set_cache_coherency()
127 obj->cache_dirty = in i915_gem_object_set_cache_coherency()
128 !(obj->cache_coherent & I915_BO_CACHE_COHERENT_FOR_WRITE); in i915_gem_object_set_cache_coherency()
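A condensed sketch of the coherency bookkeeping above, assuming the elided branch conditions match the visible assignments: a cached object is treated as coherent for both CPU reads and writes, an uncached object on an LLC platform is coherent for reads only, and anything else is not coherent at all; cache_dirty then records whether CPU cachelines may need flushing before GPU access. Names and flag values here are hypothetical simplifications of the I915_BO_CACHE_COHERENT_* flags:

#include <linux/bits.h>
#include <linux/types.h>

#define COHERENT_FOR_READ	BIT(0)	/* hypothetical stand-ins */
#define COHERENT_FOR_WRITE	BIT(1)

struct my_obj {
	unsigned int cache_coherent;
	bool cache_dirty;
};

static void set_cache_coherency(struct my_obj *obj, bool cached, bool has_llc)
{
	if (cached)
		obj->cache_coherent = COHERENT_FOR_READ | COHERENT_FOR_WRITE;
	else if (has_llc)
		obj->cache_coherent = COHERENT_FOR_READ;
	else
		obj->cache_coherent = 0;

	/* Writes that are not snooped can leave dirty cachelines behind. */
	obj->cache_dirty = !(obj->cache_coherent & COHERENT_FOR_WRITE);
}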
131 bool i915_gem_object_can_bypass_llc(struct drm_i915_gem_object *obj) in i915_gem_object_can_bypass_llc() argument
133 struct drm_i915_private *i915 = to_i915(obj->base.dev); in i915_gem_object_can_bypass_llc()
139 if (!(obj->flags & I915_BO_ALLOC_USER)) in i915_gem_object_can_bypass_llc()
159 struct drm_i915_gem_object *obj = to_intel_bo(gem); in i915_gem_close_object() local
166 spin_lock(&obj->lut_lock); in i915_gem_close_object()
167 list_for_each_entry_safe(lut, ln, &obj->lut_list, obj_link) { in i915_gem_close_object()
176 if (&ln->obj_link != &obj->lut_list) { in i915_gem_close_object()
178 if (cond_resched_lock(&obj->lut_lock)) in i915_gem_close_object()
183 spin_unlock(&obj->lut_lock); in i915_gem_close_object()
185 spin_lock(&obj->mmo.lock); in i915_gem_close_object()
186 rbtree_postorder_for_each_entry_safe(mmo, mn, &obj->mmo.offsets, offset) in i915_gem_close_object()
188 spin_unlock(&obj->mmo.lock); in i915_gem_close_object()
202 GEM_BUG_ON(vma->obj != obj); in i915_gem_close_object()
210 i915_gem_object_put(obj); in i915_gem_close_object()
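The lut_list walk in i915_gem_close_object() above periodically yields its spinlock with cond_resched_lock(); the full function (partly elided in this listing) keeps its place across the drop with a bookmark entry threaded through the list. A simplified but equivalent sketch pops entries from the head instead, which makes resuming after the lock was dropped trivially safe. Types and names are hypothetical:

#include <linux/list.h>
#include <linux/sched.h>
#include <linux/spinlock.h>

struct my_entry {
	struct list_head link;
};

static void drain_list(spinlock_t *lock, struct list_head *head)
{
	struct my_entry *e;

	spin_lock(lock);
	while ((e = list_first_entry_or_null(head, struct my_entry, link))) {
		list_del(&e->link);
		/* ... detach/process e under the lock ... */

		/* cond_resched_lock() drops the lock, reschedules if needed
		 * and retakes it; already-unlinked entries are never
		 * revisited, so resuming from the head is safe. */
		if (!list_empty(head))
			cond_resched_lock(lock);
	}
	spin_unlock(lock);
}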
216 struct drm_i915_gem_object *obj = in __i915_gem_free_object_rcu() local
217 container_of(head, typeof(*obj), rcu); in __i915_gem_free_object_rcu()
218 struct drm_i915_private *i915 = to_i915(obj->base.dev); in __i915_gem_free_object_rcu()
220 i915_gem_object_free(obj); in __i915_gem_free_object_rcu()
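__i915_gem_free_object_rcu() is the callback half of an RCU-deferred free: the memory is only returned to the slab after every reader that might still dereference the object under rcu_read_lock() has finished. A minimal sketch of the pattern with a hypothetical my_obj:

#include <linux/rcupdate.h>
#include <linux/slab.h>

struct my_obj {
	struct rcu_head rcu;
	/* ... payload ... */
};

static void my_obj_free_rcu(struct rcu_head *head)
{
	struct my_obj *obj = container_of(head, struct my_obj, rcu);

	kfree(obj);	/* runs after a full grace period */
}

static void my_obj_release(struct my_obj *obj)
{
	/* Defer the actual free until concurrent RCU readers are done. */
	call_rcu(&obj->rcu, my_obj_free_rcu);
}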
226 static void __i915_gem_object_free_mmaps(struct drm_i915_gem_object *obj) in __i915_gem_object_free_mmaps() argument
230 if (obj->userfault_count) in __i915_gem_object_free_mmaps()
231 i915_gem_object_release_mmap_gtt(obj); in __i915_gem_object_free_mmaps()
233 if (!RB_EMPTY_ROOT(&obj->mmo.offsets)) { in __i915_gem_object_free_mmaps()
236 i915_gem_object_release_mmap_offset(obj); in __i915_gem_object_free_mmaps()
239 &obj->mmo.offsets, in __i915_gem_object_free_mmaps()
241 drm_vma_offset_remove(obj->base.dev->vma_offset_manager, in __i915_gem_object_free_mmaps()
245 obj->mmo.offsets = RB_ROOT; in __i915_gem_object_free_mmaps()
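__i915_gem_object_free_mmaps() tears down the rbtree of mmap offsets with a postorder walk, which visits children before their parent so each node can be freed as it is visited. A sketch with a hypothetical node type:

#include <linux/rbtree.h>
#include <linux/slab.h>

struct my_mmo {
	struct rb_node offset;	/* keyed by mmap offset in the real tree */
};

static void free_all_mmaps(struct rb_root *root)
{
	struct my_mmo *mmo, *next;

	/* Postorder: safe to free each node while iterating. */
	rbtree_postorder_for_each_entry_safe(mmo, next, root, offset)
		kfree(mmo);

	*root = RB_ROOT;	/* leave an empty, valid tree behind */
}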
258 void __i915_gem_object_pages_fini(struct drm_i915_gem_object *obj) in __i915_gem_object_pages_fini() argument
260 if (!list_empty(&obj->vma.list)) { in __i915_gem_object_pages_fini()
269 spin_lock(&obj->vma.lock); in __i915_gem_object_pages_fini()
270 while ((vma = list_first_entry_or_null(&obj->vma.list, in __i915_gem_object_pages_fini()
273 GEM_BUG_ON(vma->obj != obj); in __i915_gem_object_pages_fini()
274 spin_unlock(&obj->vma.lock); in __i915_gem_object_pages_fini()
278 spin_lock(&obj->vma.lock); in __i915_gem_object_pages_fini()
280 spin_unlock(&obj->vma.lock); in __i915_gem_object_pages_fini()
283 __i915_gem_object_free_mmaps(obj); in __i915_gem_object_pages_fini()
285 atomic_set(&obj->mm.pages_pin_count, 0); in __i915_gem_object_pages_fini()
286 __i915_gem_object_put_pages(obj); in __i915_gem_object_pages_fini()
287 GEM_BUG_ON(i915_gem_object_has_pages(obj)); in __i915_gem_object_pages_fini()
290 void __i915_gem_free_object(struct drm_i915_gem_object *obj) in __i915_gem_free_object() argument
292 trace_i915_gem_object_destroy(obj); in __i915_gem_free_object()
294 GEM_BUG_ON(!list_empty(&obj->lut_list)); in __i915_gem_free_object()
296 bitmap_free(obj->bit_17); in __i915_gem_free_object()
298 if (obj->base.import_attach) in __i915_gem_free_object()
299 drm_prime_gem_destroy(&obj->base, NULL); in __i915_gem_free_object()
301 drm_gem_free_mmap_offset(&obj->base); in __i915_gem_free_object()
303 if (obj->ops->release) in __i915_gem_free_object()
304 obj->ops->release(obj); in __i915_gem_free_object()
306 if (obj->mm.n_placements > 1) in __i915_gem_free_object()
307 kfree(obj->mm.placements); in __i915_gem_free_object()
309 if (obj->shares_resv_from) in __i915_gem_free_object()
310 i915_vm_resv_put(obj->shares_resv_from); in __i915_gem_free_object()
312 __i915_gem_object_fini(obj); in __i915_gem_free_object()
318 struct drm_i915_gem_object *obj, *on; in __i915_gem_free_objects() local
320 llist_for_each_entry_safe(obj, on, freed, freed) { in __i915_gem_free_objects()
322 if (obj->ops->delayed_free) { in __i915_gem_free_objects()
323 obj->ops->delayed_free(obj); in __i915_gem_free_objects()
326 __i915_gem_object_pages_fini(obj); in __i915_gem_free_objects()
327 __i915_gem_free_object(obj); in __i915_gem_free_objects()
330 call_rcu(&obj->rcu, __i915_gem_free_object_rcu); in __i915_gem_free_objects()
353 struct drm_i915_gem_object *obj = to_intel_bo(gem_obj); in i915_gem_free_object() local
354 struct drm_i915_private *i915 = to_i915(obj->base.dev); in i915_gem_free_object()
356 GEM_BUG_ON(i915_gem_object_is_framebuffer(obj)); in i915_gem_free_object()
373 i915_gem_object_make_unshrinkable(obj); in i915_gem_free_object()
386 if (llist_add(&obj->freed, &i915->mm.free_list)) in i915_gem_free_object()
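i915_gem_free_object() cannot tear the object down in the caller's context, so objects are parked on a lock-free llist and reaped by a worker; the loop in __i915_gem_free_objects() above is the consumer side. llist_add() returns true only when the list was previously empty, which is what arms the worker exactly once per batch. A sketch with hypothetical names standing in for i915->mm.free_list and the driver's free worker:

#include <linux/llist.h>
#include <linux/slab.h>
#include <linux/workqueue.h>

struct my_obj {
	struct llist_node freed;
};

static LLIST_HEAD(free_list);		/* stands in for i915->mm.free_list */

static void free_worker(struct work_struct *work)
{
	struct llist_node *freed = llist_del_all(&free_list);
	struct my_obj *obj, *next;

	llist_for_each_entry_safe(obj, next, freed, freed)
		kfree(obj);
}
static DECLARE_WORK(free_work, free_worker);

static void my_obj_queue_free(struct my_obj *obj)
{
	/* True only for the first entry on an empty list: later entries
	 * piggy-back on the work item that is already queued. */
	if (llist_add(&obj->freed, &free_list))
		schedule_work(&free_work);
}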
390 void __i915_gem_object_flush_frontbuffer(struct drm_i915_gem_object *obj, in __i915_gem_object_flush_frontbuffer() argument
395 front = __intel_frontbuffer_get(obj); in __i915_gem_object_flush_frontbuffer()
402 void __i915_gem_object_invalidate_frontbuffer(struct drm_i915_gem_object *obj, in __i915_gem_object_invalidate_frontbuffer() argument
407 front = __intel_frontbuffer_get(obj); in __i915_gem_object_invalidate_frontbuffer()
415 i915_gem_object_read_from_page_kmap(struct drm_i915_gem_object *obj, u64 offset, void *dst, int size) in i915_gem_object_read_from_page_kmap() argument
420 src_map = kmap_atomic(i915_gem_object_get_page(obj, offset >> PAGE_SHIFT)); in i915_gem_object_read_from_page_kmap()
423 if (!(obj->cache_coherent & I915_BO_CACHE_COHERENT_FOR_READ)) in i915_gem_object_read_from_page_kmap()
431 i915_gem_object_read_from_page_iomap(struct drm_i915_gem_object *obj, u64 offset, void *dst, int size) in i915_gem_object_read_from_page_iomap() argument
435 dma_addr_t dma = i915_gem_object_get_dma_address(obj, offset >> PAGE_SHIFT); in i915_gem_object_read_from_page_iomap()
437 src_map = io_mapping_map_wc(&obj->mm.region->iomap, in i915_gem_object_read_from_page_iomap()
438 dma - obj->mm.region->region.start, in i915_gem_object_read_from_page_iomap()
462 int i915_gem_object_read_from_page(struct drm_i915_gem_object *obj, u64 offset, void *dst, int size) in i915_gem_object_read_from_page() argument
464 GEM_BUG_ON(offset >= obj->base.size); in i915_gem_object_read_from_page()
466 GEM_BUG_ON(!i915_gem_object_has_pinned_pages(obj)); in i915_gem_object_read_from_page()
468 if (i915_gem_object_has_struct_page(obj)) in i915_gem_object_read_from_page()
469 i915_gem_object_read_from_page_kmap(obj, offset, dst, size); in i915_gem_object_read_from_page()
470 else if (i915_gem_object_has_iomem(obj)) in i915_gem_object_read_from_page()
471 i915_gem_object_read_from_page_iomap(obj, offset, dst, size); in i915_gem_object_read_from_page()
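i915_gem_object_read_from_page() dispatches to the kmap path for struct-page-backed memory and to the io_mapping path for iomem. A sketch of the kmap leg, assuming the caller has already resolved the backing struct page and, as the (partly elided) checks in the real function enforce, that the span does not cross a page boundary:

#include <linux/highmem.h>
#include <linux/string.h>

static void read_from_page(struct page *page, unsigned int off,
			   void *dst, int size)
{
	void *src = kmap_atomic(page);	/* short-lived, non-sleeping mapping */

	/* Caller guarantees [off, off + size) stays inside this page. */
	memcpy(dst, src + off, size);
	kunmap_atomic(src);
}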
491 bool i915_gem_object_evictable(struct drm_i915_gem_object *obj) in i915_gem_object_evictable() argument
494 int pin_count = atomic_read(&obj->mm.pages_pin_count); in i915_gem_object_evictable()
499 spin_lock(&obj->vma.lock); in i915_gem_object_evictable()
500 list_for_each_entry(vma, &obj->vma.list, obj_link) { in i915_gem_object_evictable()
502 spin_unlock(&obj->vma.lock); in i915_gem_object_evictable()
508 spin_unlock(&obj->vma.lock); in i915_gem_object_evictable()
522 bool i915_gem_object_migratable(struct drm_i915_gem_object *obj) in i915_gem_object_migratable() argument
524 struct intel_memory_region *mr = READ_ONCE(obj->mm.region); in i915_gem_object_migratable()
529 return obj->mm.n_placements > 1; in i915_gem_object_migratable()
541 bool i915_gem_object_has_struct_page(const struct drm_i915_gem_object *obj) in i915_gem_object_has_struct_page() argument
544 if (IS_DGFX(to_i915(obj->base.dev)) && in i915_gem_object_has_struct_page()
545 i915_gem_object_evictable((void __force *)obj)) in i915_gem_object_has_struct_page()
546 assert_object_held_shared(obj); in i915_gem_object_has_struct_page()
548 return obj->mem_flags & I915_BO_FLAG_STRUCT_PAGE; in i915_gem_object_has_struct_page()
560 bool i915_gem_object_has_iomem(const struct drm_i915_gem_object *obj) in i915_gem_object_has_iomem() argument
563 if (IS_DGFX(to_i915(obj->base.dev)) && in i915_gem_object_has_iomem()
564 i915_gem_object_evictable((void __force *)obj)) in i915_gem_object_has_iomem()
565 assert_object_held_shared(obj); in i915_gem_object_has_iomem()
567 return obj->mem_flags & I915_BO_FLAG_IOMEM; in i915_gem_object_has_iomem()
587 bool i915_gem_object_can_migrate(struct drm_i915_gem_object *obj, in i915_gem_object_can_migrate() argument
590 struct drm_i915_private *i915 = to_i915(obj->base.dev); in i915_gem_object_can_migrate()
591 unsigned int num_allowed = obj->mm.n_placements; in i915_gem_object_can_migrate()
596 GEM_BUG_ON(obj->mm.madv != I915_MADV_WILLNEED); in i915_gem_object_can_migrate()
602 if (obj->mm.region == mr) in i915_gem_object_can_migrate()
605 if (!i915_gem_object_evictable(obj)) in i915_gem_object_can_migrate()
608 if (!obj->ops->migrate) in i915_gem_object_can_migrate()
611 if (!(obj->flags & I915_BO_ALLOC_USER)) in i915_gem_object_can_migrate()
618 if (mr == obj->mm.placements[i]) in i915_gem_object_can_migrate()
650 int i915_gem_object_migrate(struct drm_i915_gem_object *obj, in i915_gem_object_migrate() argument
654 struct drm_i915_private *i915 = to_i915(obj->base.dev); in i915_gem_object_migrate()
658 GEM_BUG_ON(obj->mm.madv != I915_MADV_WILLNEED); in i915_gem_object_migrate()
659 assert_object_held(obj); in i915_gem_object_migrate()
664 if (!i915_gem_object_can_migrate(obj, id)) in i915_gem_object_migrate()
667 if (!obj->ops->migrate) { in i915_gem_object_migrate()
668 if (GEM_WARN_ON(obj->mm.region != mr)) in i915_gem_object_migrate()
673 return obj->ops->migrate(obj, mr); in i915_gem_object_migrate()
684 bool i915_gem_object_placement_possible(struct drm_i915_gem_object *obj, in i915_gem_object_placement_possible() argument
689 if (!obj->mm.n_placements) { in i915_gem_object_placement_possible()
692 return i915_gem_object_has_iomem(obj); in i915_gem_object_placement_possible()
694 return i915_gem_object_has_pages(obj); in i915_gem_object_placement_possible()
702 for (i = 0; i < obj->mm.n_placements; i++) { in i915_gem_object_placement_possible()
703 if (obj->mm.placements[i]->type == type) in i915_gem_object_placement_possible()
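The listing ends inside the placement scan of i915_gem_object_placement_possible(): succeed as soon as any allowed placement has the requested memory type. A simplified sketch with hypothetical region types:

#include <linux/types.h>

enum my_mem_type { MY_MEM_SYSTEM, MY_MEM_LOCAL };	/* hypothetical */

struct my_region {
	enum my_mem_type type;
};

static bool placement_possible(struct my_region * const *placements,
			       unsigned int n, enum my_mem_type type)
{
	unsigned int i;

	for (i = 0; i < n; i++) {
		if (placements[i]->type == type)
			return true;
	}
	return false;
}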