
Searched refs:fence (Results 1 – 25 of 342) sorted by relevance


/linux/drivers/dma-buf/
dma-fence.c
155 fence = kzalloc(sizeof(*fence), GFP_KERNEL); in dma_fence_allocate_private_stub()
156 if (fence == NULL) in dma_fence_allocate_private_stub()
165 return fence; in dma_fence_allocate_private_stub()
405 if (!fence) in dma_fence_signal_timestamp()
457 if (!fence) in dma_fence_signal()
504 ret = fence->ops->wait(fence, intr, timeout); in dma_fence_wait_timeout()
529 fence->ops->get_driver_name(fence), in dma_fence_release()
530 fence->ops->get_timeline_name(fence), in dma_fence_release()
531 fence->context, fence->seqno)) { in dma_fence_release()
548 fence->ops->release(fence); in dma_fence_release()
[all …]
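
The dma-fence.c hits above span the core allocate/signal/wait/release path. A minimal sketch of how a consumer and a producer typically drive that lifecycle, assuming a fence already initialized elsewhere with dma_fence_init() (the example_* names are invented):

#include <linux/dma-fence.h>
#include <linux/jiffies.h>

/* Consumer side: wait on a fence we hold a reference to, then drop it. */
static long example_wait_and_release(struct dma_fence *fence)
{
	long timeout;

	/* Interruptible wait, bounded to roughly one second. */
	timeout = dma_fence_wait_timeout(fence, true, msecs_to_jiffies(1000));
	if (timeout == 0)
		pr_warn("fence %llu:%llu timed out\n",
			fence->context, fence->seqno);

	dma_fence_put(fence);		/* drop the reference we were handed */
	return timeout < 0 ? timeout : 0;
}

/* Producer side: mark the work complete and release the producer's reference. */
static void example_complete(struct dma_fence *fence)
{
	/* Wakes waiters and runs callbacks; returns -EINVAL if already signalled. */
	dma_fence_signal(fence);
	dma_fence_put(fence);
}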
dma-fence-chain.c
46 dma_fence_put(fence); in dma_fence_chain_walk()
74 dma_fence_put(fence); in dma_fence_chain_walk()
152 struct dma_fence *f = chain ? chain->fence : fence; in dma_fence_chain_enable_signaling()
156 dma_fence_put(fence); in dma_fence_chain_enable_signaling()
167 dma_fence_chain_for_each(fence, fence) { in dma_fence_chain_signaled()
169 struct dma_fence *f = chain ? chain->fence : fence; in dma_fence_chain_signaled()
172 dma_fence_put(fence); in dma_fence_chain_signaled()
207 dma_fence_put(chain->fence); in dma_fence_chain_release()
208 dma_fence_free(fence); in dma_fence_chain_release()
233 struct dma_fence *fence, in dma_fence_chain_init() argument
[all …]
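
The chain matches mirror dma_fence_chain_signaled(): dma_fence_chain_for_each() takes and drops references as it walks, so an early exit must put the current iterator fence. A sketch along the same lines (example_* name invented):

#include <linux/dma-fence-chain.h>

/* Walk a fence chain and report whether every link has signalled. */
static bool example_chain_signaled(struct dma_fence *head)
{
	struct dma_fence *fence;

	dma_fence_chain_for_each(fence, head) {
		struct dma_fence_chain *chain = to_dma_fence_chain(fence);
		struct dma_fence *f = chain ? chain->fence : fence;

		if (!dma_fence_is_signaled(f)) {
			dma_fence_put(fence);	/* drop the iterator's reference */
			return false;
		}
	}
	return true;	/* the iterator ends with a NULL fence, nothing to put */
}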
st-dma-fence-chain.c
190 dma_fence_put(fence); in find_seqno()
199 dma_fence_put(fence); in find_seqno()
212 dma_fence_get(fence); in find_seqno()
214 dma_fence_put(fence); in find_seqno()
225 dma_fence_get(fence); in find_seqno()
227 dma_fence_put(fence); in find_seqno()
269 dma_fence_put(fence); in find_signaled()
275 if (fence && fence != fc.chains[0]) { in find_signaled()
307 dma_fence_put(fence); in find_out_of_order()
322 fence ? fence->seqno : 0); in find_out_of_order()
[all …]
dma-resv.c
208 dma_fence_put(fence); in dma_resv_reserve_shared()
254 dma_fence_get(fence); in dma_resv_add_shared_fence()
307 if (fence) in dma_resv_add_excl_fence()
308 dma_fence_get(fence); in dma_resv_add_excl_fence()
366 if (!cursor->fence) in dma_resv_iter_walk_unlocked()
380 cursor->fence = dma_fence_get_rcu(cursor->fence); in dma_resv_iter_walk_unlocked()
381 if (!cursor->fence || !dma_fence_is_signaled(cursor->fence)) in dma_resv_iter_walk_unlocked()
401 return cursor->fence; in dma_resv_iter_first_unlocked()
426 return cursor->fence; in dma_resv_iter_next_unlocked()
450 if (!fence) in dma_resv_iter_first()
[all …]
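
dma-resv.c holds the reservation-object fence lists that the cursor-based iterators above walk. A hedged sketch of checking whether everything attached to a dma_resv has signalled, assuming this kernel's iterator API in which dma_resv_iter_begin() takes a bool selecting the shared fences as well as the exclusive one:

#include <linux/dma-resv.h>

/* Sketch only: test all fences on @resv without holding its lock. */
static bool example_resv_all_signaled(struct dma_resv *resv)
{
	struct dma_resv_iter cursor;
	struct dma_fence *fence;
	bool signaled = true;

	dma_resv_iter_begin(&cursor, resv, true);
	dma_resv_for_each_fence_unlocked(&cursor, fence) {
		if (!dma_fence_is_signaled(fence)) {
			signaled = false;
			break;
		}
	}
	dma_resv_iter_end(&cursor);	/* drops the cursor's fence reference */

	return signaled;
}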
sync_file.c
72 sync_file->fence = dma_fence_get(fence); in sync_file_create()
111 fence = dma_fence_get(sync_file->fence); in sync_file_get_fence()
114 return fence; in sync_file_get_fence()
136 struct dma_fence *fence = sync_file->fence; in sync_file_get_name() local
139 fence->ops->get_driver_name(fence), in sync_file_get_name()
140 fence->ops->get_timeline_name(fence), in sync_file_get_name()
141 fence->context, in sync_file_get_name()
142 fence->seqno); in sync_file_get_name()
357 data.fence = fd; in sync_file_ioctl_merge()
381 strlcpy(info->obj_name, fence->ops->get_timeline_name(fence), in sync_fill_fence_info()
[all …]
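
sync_file.c wraps a dma_fence in a file so user space can pass it around as an fd. A sketch of the usual in-kernel export path; the get_unused_fd_flags()/fd_install() pairing is the common pattern rather than something shown in the matches above:

#include <linux/sync_file.h>
#include <linux/file.h>
#include <linux/fcntl.h>

/* Export @fence to user space as a sync_file fd.
 * sync_file_create() takes its own reference, so the caller keeps its own. */
static int example_export_fence(struct dma_fence *fence)
{
	struct sync_file *sync_file;
	int fd;

	fd = get_unused_fd_flags(O_CLOEXEC);
	if (fd < 0)
		return fd;

	sync_file = sync_file_create(fence);
	if (!sync_file) {
		put_unused_fd(fd);
		return -ENOMEM;
	}

	fd_install(fd, sync_file->file);
	return fd;
}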
/linux/drivers/gpu/drm/i915/
i915_sw_fence.c
132 return fn(fence, state); in __i915_sw_fence_notify()
138 debug_fence_free(fence); in i915_sw_fence_fini()
258 fence->error = 0; in i915_sw_fence_reinit()
290 if (fence == signaler) in __i915_sw_fence_check_if_after()
375 wq->private = fence; in __i915_sw_fence_await_sw_fence()
429 fence = xchg(&cb->base.fence, NULL); in timer_i915_sw_fence_wake()
430 if (!fence) in timer_i915_sw_fence_wake()
450 fence = xchg(&cb->base.fence, NULL); in dma_i915_sw_fence_wake_timer()
451 if (fence) { in dma_i915_sw_fence_wake_timer()
503 cb->fence = fence; in i915_sw_fence_await_dma_fence()
[all …]
i915_sw_fence.h
41 void __i915_sw_fence_init(struct i915_sw_fence *fence,
46 #define i915_sw_fence_init(fence, fn) \ argument
50 __i915_sw_fence_init((fence), (fn), #fence, &__key); \
53 #define i915_sw_fence_init(fence, fn) \ argument
54 __i915_sw_fence_init((fence), (fn), NULL, NULL)
60 void i915_sw_fence_fini(struct i915_sw_fence *fence);
76 struct i915_sw_fence *fence; member
99 return atomic_read(&fence->pending) <= 0; in i915_sw_fence_signaled()
104 return atomic_read(&fence->pending) < 0; in i915_sw_fence_done()
109 wait_event(fence->wait, i915_sw_fence_done(fence)); in i915_sw_fence_wait()
[all …]
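
i915_sw_fence is i915's internal software fence used to collect dependencies before a request runs. A rough, assumed sketch of the init/await/commit/wait flow based on the declarations above; the exact semantics are driver-internal, so treat this as illustrative only:

#include "i915_sw_fence.h"

static int example_notify(struct i915_sw_fence *fence,
			  enum i915_sw_fence_notify state)
{
	/* FENCE_COMPLETE: all dependencies resolved; FENCE_FREE: last reference. */
	return NOTIFY_DONE;
}

static void example_sw_fence(struct dma_fence *in)
{
	struct i915_sw_fence fence;

	i915_sw_fence_init(&fence, example_notify);
	/* Register @in as a dependency (assumed signature: fence, dma, timeout, gfp). */
	i915_sw_fence_await_dma_fence(&fence, in, 0, GFP_KERNEL);
	i915_sw_fence_commit(&fence);	/* no further dependencies will be added */
	i915_sw_fence_wait(&fence);	/* block until everything has signalled */
	i915_sw_fence_fini(&fence);
}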
i915_request.c
1034 fence = NULL; in i915_request_await_start()
1064 fence = &prev->fence; in i915_request_await_start()
1067 if (!fence) in i915_request_await_start()
1073 fence, 0, in i915_request_await_start()
1377 fence = *child++; in i915_request_await_execution()
1381 if (fence->context == rq->fence.context) in i915_request_await_execution()
1479 fence = *child++; in i915_request_await_dma_fence()
1488 if (fence->context == rq->fence.context) in i915_request_await_dma_fence()
1494 fence)) in i915_request_await_dma_fence()
1510 fence); in i915_request_await_dma_fence()
[all …]
i915_active.h
64 void *fence, in __i915_active_fence_init() argument
67 RCU_INIT_POINTER(active->fence, fence); in __i915_active_fence_init()
76 struct dma_fence *fence);
103 struct dma_fence *fence; in i915_active_fence_get() local
106 fence = dma_fence_get_rcu_safe(&active->fence); in i915_active_fence_get()
109 return fence; in i915_active_fence_get()
123 return rcu_access_pointer(active->fence); in i915_active_fence_isset()
176 &rq->fence); in i915_active_add_request()
238 struct dma_fence *fence; in __i915_request_await_exclusive() local
242 if (fence) { in __i915_request_await_exclusive()
[all …]
/linux/drivers/gpu/drm/i915/gt/
intel_ggtt_fencing.c
76 val = fence->start + fence->size - I965_FENCE_PAGE; in i965_write_fence_reg()
241 GEM_BUG_ON(old->fence != fence); in fence_update()
269 vma->fence = fence; in fence_update()
286 struct i915_fence_reg *fence = vma->fence; in i915_vma_revoke_fence() local
290 if (!fence) in i915_vma_revoke_fence()
318 return fence->vma && i915_vma_is_active(fence->vma); in fence_is_active()
327 GEM_BUG_ON(fence->vma && fence->vma->fence != fence); in fence_find()
344 return fence; in fence_find()
365 fence = vma->fence; in __i915_vma_pin_fence()
388 GEM_BUG_ON(vma->fence != (set ? fence : NULL)); in __i915_vma_pin_fence()
[all …]
/linux/include/linux/
dma-fence.h
274 if (fence) in dma_fence_put()
286 if (fence) in dma_fence_get()
288 return fence; in dma_fence_get()
301 return fence; in dma_fence_get_rcu()
326 struct dma_fence *fence; in dma_fence_get_rcu_safe() local
329 if (!fence) in dma_fence_get_rcu_safe()
352 dma_fence_put(fence); in dma_fence_get_rcu_safe()
403 if (fence->ops->signaled && fence->ops->signaled(fence)) { in dma_fence_is_signaled_locked()
433 if (fence->ops->signaled && fence->ops->signaled(fence)) { in dma_fence_is_signaled()
434 dma_fence_signal(fence); in dma_fence_is_signaled()
[all …]
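
dma-fence.h also provides the RCU-safe getters seen in the i915_active.h matches. A sketch of taking a reference on a fence published through an RCU-protected pointer (struct example_obj and its member are invented):

#include <linux/dma-fence.h>
#include <linux/rcupdate.h>

struct example_obj {
	struct dma_fence __rcu *last_fence;
};

/* Returns a referenced fence (or NULL); the caller must dma_fence_put() it. */
static struct dma_fence *example_get_last(struct example_obj *obj)
{
	struct dma_fence *fence;

	rcu_read_lock();
	/* Re-reads the pointer if the fence is being torn down concurrently. */
	fence = dma_fence_get_rcu_safe(&obj->last_fence);
	rcu_read_unlock();

	return fence;
}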
/linux/drivers/gpu/drm/nouveau/
nouveau_fence.c
61 list_del(&fence->head); in nouveau_fence_signal()
86 return from_fence(fence); in nouveau_local_fence()
96 fence = list_entry(fctx->pending.next, typeof(*fence), head); in nouveau_fence_context_kill()
141 fence = list_entry(fctx->pending.next, typeof(*fence), head); in nouveau_fence_update()
165 fence = list_entry(fctx->pending.next, typeof(*fence), head); in nouveau_fence_wait_uevent_handler()
216 fence->channel = chan; in nouveau_fence_emit()
228 ret = fctx->emit(fence); in nouveau_fence_emit()
386 if (fence) { in nouveau_fence_sync()
426 fence = kzalloc(sizeof(*fence), GFP_KERNEL); in nouveau_fence_new()
427 if (!fence) in nouveau_fence_new()
[all …]
/linux/drivers/gpu/drm/vgem/
vgem_fence.c
50 struct vgem_fence *fence = container_of(base, typeof(*fence), base); in vgem_fence_release() local
65 dma_fence_is_signaled(fence) ? fence->seqno : 0); in vgem_fence_timeline_value_str()
79 struct vgem_fence *fence = from_timer(fence, t, timer); in vgem_fence_timeout() local
89 fence = kzalloc(sizeof(*fence), GFP_KERNEL); in vgem_fence_create()
90 if (!fence) in vgem_fence_create()
94 dma_fence_init(&fence->base, &vgem_fence_ops, &fence->lock, in vgem_fence_create()
102 return &fence->base; in vgem_fence_create()
147 if (!fence) { in vgem_fence_attach_ioctl()
217 fence = idr_replace(&vfile->fence_idr, NULL, arg->fence); in vgem_fence_signal_ioctl()
219 if (!fence) in vgem_fence_signal_ioctl()
[all …]
/linux/drivers/gpu/drm/amd/amdgpu/
amdgpu_amdkfd_fence.c
69 fence = kzalloc(sizeof(*fence), GFP_KERNEL); in amdgpu_amdkfd_fence_create()
70 if (fence == NULL) in amdgpu_amdkfd_fence_create()
75 fence->mm = mm; in amdgpu_amdkfd_fence_create()
79 dma_fence_init(&fence->base, &amdkfd_fence_ops, &fence->lock, in amdgpu_amdkfd_fence_create()
82 return fence; in amdgpu_amdkfd_fence_create()
94 return fence; in to_amdgpu_amdkfd_fence()
123 if (!fence) in amdkfd_fence_enable_signaling()
129 if (!fence->svm_bo) { in amdkfd_fence_enable_signaling()
154 if (WARN_ON(!fence)) in amdkfd_fence_release()
157 mmdrop(fence->mm); in amdkfd_fence_release()
[all …]
/linux/drivers/gpu/drm/vmwgfx/
vmwgfx_fence.c
121 fence->destroy(fence); in vmw_fence_obj_destroy()
541 fence = kzalloc(sizeof(*fence), GFP_KERNEL); in vmw_fence_create()
550 *p_fence = fence; in vmw_fence_create()
554 kfree(fence); in vmw_fence_create()
562 container_of(fence, struct vmw_user_fence, fence); in vmw_user_fence_destroy()
578 struct vmw_fence_obj *fence = &ufence->fence; in vmw_user_fence_base_release() local
813 fence = &(container_of(base, struct vmw_user_fence, base)->fence); in vmw_fence_obj_wait_ioctl()
854 fence = &(container_of(base, struct vmw_user_fence, base)->fence); in vmw_fence_obj_signaled_ioctl()
1023 eaction->fence = vmw_fence_obj_reference(fence); in vmw_event_fence_action_queue()
1138 if (!fence) { in vmw_fence_event_ioctl()
[all …]
/linux/include/trace/events/
dma_fence.h
16 TP_ARGS(fence),
19 __string(driver, fence->ops->get_driver_name(fence))
20 __string(timeline, fence->ops->get_timeline_name(fence))
26 __assign_str(driver, fence->ops->get_driver_name(fence));
27 __assign_str(timeline, fence->ops->get_timeline_name(fence));
41 TP_ARGS(fence)
48 TP_ARGS(fence)
55 TP_ARGS(fence)
62 TP_ARGS(fence)
69 TP_ARGS(fence)
[all …]
/linux/drivers/gpu/drm/scheduler/
sched_fence.c
53 dma_fence_signal(&fence->scheduled); in drm_sched_fence_scheduled()
58 dma_fence_signal(&fence->finished); in drm_sched_fence_finished()
77 if (!WARN_ON_ONCE(!fence)) in drm_sched_fence_free_rcu()
92 if (!WARN_ON_ONCE(fence->sched)) in drm_sched_fence_free()
108 dma_fence_put(fence->parent); in drm_sched_fence_release_scheduled()
123 dma_fence_put(&fence->scheduled); in drm_sched_fence_release_finished()
156 if (fence == NULL) in drm_sched_fence_alloc()
159 fence->owner = owner; in drm_sched_fence_alloc()
160 spin_lock_init(&fence->lock); in drm_sched_fence_alloc()
162 return fence; in drm_sched_fence_alloc()
[all …]
gpu_scheduler_trace.h
40 __field(struct dma_fence *, fence)
58 __entry->fence, __entry->name,
85 __entry->fence, __entry->name,
91 TP_ARGS(fence),
93 __field(struct dma_fence *, fence)
97 __entry->fence = &fence->finished;
104 TP_ARGS(sched_job, fence),
116 __entry->fence = fence;
117 __entry->ctx = fence->context;
118 __entry->seqno = fence->seqno;
[all …]
/linux/drivers/gpu/drm/i915/selftests/
lib_sw_fence.c
42 atomic_set(&fence->pending, 1); in __onstack_fence_init()
43 fence->error = 0; in __onstack_fence_init()
49 if (!fence->flags) in onstack_fence_fini()
52 i915_sw_fence_commit(fence); in onstack_fence_fini()
53 i915_sw_fence_fini(fence); in onstack_fence_fini()
85 struct i915_sw_fence fence; member
95 struct heap_fence *h = container_of(fence, typeof(*h), fence); in heap_fence_notify()
102 heap_fence_put(&h->fence); in heap_fence_notify()
119 return &h->fence; in heap_fence_create()
126 i915_sw_fence_fini(&h->fence); in heap_fence_release()
[all …]
/linux/drivers/gpu/drm/radeon/
radeon_fence.c
177 seq = atomic64_read(&fence->rdev->fence_drv[fence->ring].last_seq); in radeon_fence_check_signaled()
180 radeon_irq_kms_sw_irq_put(fence->rdev, fence->ring); in radeon_fence_check_signaled()
431 if (!fence) in radeon_fence_signaled()
434 if (radeon_fence_seq_signaled(fence->rdev, fence->seq, fence->ring)) { in radeon_fence_signaled()
550 seq[fence->ring] = fence->seq; in radeon_fence_wait_timeout()
692 return fence; in radeon_fence_ref()
706 *fence = NULL; in radeon_fence_unref()
754 if (!fence) { in radeon_fence_need_sync()
764 if (fence->seq <= fdrv->sync_seq[fence->ring]) { in radeon_fence_need_sync()
785 if (!fence) { in radeon_fence_note_sync()
[all …]
radeon_sync.c
63 struct radeon_fence *fence) in radeon_sync_fence() argument
67 if (!fence) in radeon_sync_fence()
70 other = sync->sync_to[fence->ring]; in radeon_sync_fence()
71 sync->sync_to[fence->ring] = radeon_fence_later(fence, other); in radeon_sync_fence()
73 if (fence->is_vm_update) { in radeon_sync_fence()
96 struct radeon_fence *fence; in radeon_sync_resv() local
103 if (fence && fence->rdev == rdev) in radeon_sync_resv()
104 radeon_sync_fence(sync, fence); in radeon_sync_resv()
115 fence = to_radeon_fence(f); in radeon_sync_resv()
116 if (fence && fence->rdev == rdev) in radeon_sync_resv()
[all …]
/linux/drivers/gpu/drm/
drm_syncobj.c
260 wait->fence = fence; in drm_syncobj_fence_add_wait()
327 if (fence) in drm_syncobj_replace_fence()
334 rcu_assign_pointer(syncobj->fence, fence); in drm_syncobj_replace_fence()
451 *fence = wait.fence; in drm_syncobj_find_fence()
513 if (fence) in drm_syncobj_create()
689 if (!fence) in drm_syncobj_import_sync_file_fence()
945 fence = rcu_dereference_protected(syncobj->fence, in syncobj_wait_syncobj_func()
954 wait->fence = fence; in syncobj_wait_syncobj_func()
1016 if (fence) in drm_syncobj_array_wait_timeout()
1017 entries[i].fence = fence; in drm_syncobj_array_wait_timeout()
[all …]
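
drm_syncobj stores a replaceable fence behind a userspace handle. A hedged sketch of resolving a handle to its current fence and waiting on it (example_* name invented; error paths trimmed):

#include <drm/drm_syncobj.h>
#include <drm/drm_file.h>
#include <linux/dma-fence.h>

/* Look up the fence currently attached to @handle and wait interruptibly. */
static long example_wait_syncobj(struct drm_file *file_priv, u32 handle)
{
	struct dma_fence *fence;
	long ret;

	ret = drm_syncobj_find_fence(file_priv, handle, 0, 0, &fence);
	if (ret)
		return ret;

	ret = dma_fence_wait(fence, true);
	dma_fence_put(fence);
	return ret;
}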
/linux/tools/testing/selftests/sync/
sync_stress_consumer.c
40 static int busy_wait_on_fence(int fence) in busy_wait_on_fence() argument
66 int fence, valid, i; in mpsc_producer_thread() local
73 valid = sw_sync_fence_is_valid(fence); in mpsc_producer_thread()
82 ASSERT(sync_wait(fence, -1) > 0, in mpsc_producer_thread()
100 sw_sync_fence_destroy(fence); in mpsc_producer_thread()
108 int fence, merged, tmp, valid, it, i; in mpcs_consumer_thread() local
121 sw_sync_fence_destroy(fence); in mpcs_consumer_thread()
122 fence = merged; in mpcs_consumer_thread()
125 valid = sw_sync_fence_is_valid(fence); in mpcs_consumer_thread()
133 ASSERT(sync_wait(fence, -1) > 0, in mpcs_consumer_thread()
[all …]
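
The sync selftests exercise fences from user space through sw_sync timelines. A sketch using the harness helpers whose names are assumed from the sync.h/sw_sync.h headers in this directory:

#include "sync.h"
#include "sw_sync.h"

/* Userspace sketch: create a timeline, fence at value 5, advance, then wait. */
static int example_sw_sync(void)
{
	int timeline, fence, ret;

	timeline = sw_sync_timeline_create();
	if (timeline < 0)
		return timeline;

	/* Fence fd that signals once the timeline reaches value 5. */
	fence = sw_sync_fence_create(timeline, "example", 5);

	sw_sync_timeline_inc(timeline, 5);	/* advance past the fence value */
	ret = sync_wait(fence, 1000);		/* >0 signalled, 0 timeout, <0 error */

	sw_sync_fence_destroy(fence);
	sw_sync_timeline_destroy(timeline);
	return ret > 0 ? 0 : -1;
}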
/linux/drivers/gpu/drm/v3d/
v3d_fence.c
8 struct v3d_fence *fence; in v3d_fence_create() local
10 fence = kzalloc(sizeof(*fence), GFP_KERNEL); in v3d_fence_create()
11 if (!fence) in v3d_fence_create()
14 fence->dev = &v3d->drm; in v3d_fence_create()
15 fence->queue = queue; in v3d_fence_create()
16 fence->seqno = ++v3d->queue[queue].emit_seqno; in v3d_fence_create()
17 dma_fence_init(&fence->base, &v3d_fence_ops, &v3d->job_lock, in v3d_fence_create()
18 v3d->queue[queue].fence_context, fence->seqno); in v3d_fence_create()
20 return &fence->base; in v3d_fence_create()
23 static const char *v3d_fence_get_driver_name(struct dma_fence *fence) in v3d_fence_get_driver_name() argument
[all …]
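
v3d_fence_create() above shows the minimal recipe for minting driver fences: allocate, then dma_fence_init() with ops providing at least get_driver_name and get_timeline_name. A standalone sketch of the same shape (example_* names invented; a real driver would take its context from dma_fence_context_alloc() and protect the seqno with its own lock):

#include <linux/dma-fence.h>
#include <linux/slab.h>
#include <linux/spinlock.h>

static const char *example_get_driver_name(struct dma_fence *fence)
{
	return "example";
}

static const char *example_get_timeline_name(struct dma_fence *fence)
{
	return "example-queue";
}

static const struct dma_fence_ops example_fence_ops = {
	.get_driver_name = example_get_driver_name,
	.get_timeline_name = example_get_timeline_name,
};

static DEFINE_SPINLOCK(example_fence_lock);
static u64 example_fence_context;	/* set once via dma_fence_context_alloc(1) */
static u64 example_fence_seqno;

static struct dma_fence *example_fence_create(void)
{
	struct dma_fence *fence;

	fence = kzalloc(sizeof(*fence), GFP_KERNEL);
	if (!fence)
		return NULL;

	dma_fence_init(fence, &example_fence_ops, &example_fence_lock,
		       example_fence_context, ++example_fence_seqno);
	return fence;	/* signalled later with dma_fence_signal() */
}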
/linux/drivers/gpu/drm/virtio/
virtgpu_fence.c
83 if (!fence) in virtio_gpu_fence_alloc()
84 return fence; in virtio_gpu_fence_alloc()
86 fence->drv = drv; in virtio_gpu_fence_alloc()
87 fence->ring_idx = ring_idx; in virtio_gpu_fence_alloc()
98 return fence; in virtio_gpu_fence_alloc()
103 struct virtio_gpu_fence *fence) in virtio_gpu_fence_emit() argument
109 fence->fence_id = fence->f.seqno = ++drv->current_fence_id; in virtio_gpu_fence_emit()
110 dma_fence_get(&fence->f); in virtio_gpu_fence_emit()
114 trace_dma_fence_emit(&fence->f); in virtio_gpu_fence_emit()
120 if (fence->emit_fence_info) { in virtio_gpu_fence_emit()
[all …]

Completed in 56 milliseconds
