Lines matching refs: work

Cross-reference hits for the identifier "work" in io-wq.c, the io_uring worker-thread pool. Each entry shows the source line number, the matching source line, the enclosing function, and the reference kind (member, argument, local).

61 		struct work_struct work;  member
164 struct io_wq_work *work) in io_work_get_acct() argument
166 return io_get_acct(wqe, !(work->flags & IO_WQ_WORK_UNBOUND)); in io_work_get_acct()
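The two hits above are io_work_get_acct(): io-wq keeps separate accounting for bounded work (capped worker count, e.g. regular file I/O) and unbound work (may block indefinitely, e.g. network I/O), and picks the bucket by testing IO_WQ_WORK_UNBOUND. A minimal user-space model of that selection, assuming the flag value from io-wq.h; the struct contents are stand-ins for illustration:

    #include <stdio.h>
    #include <stdbool.h>

    /* Minimal user-space model (not the kernel source) of io-wq's
     * per-type accounting. Names mirror io-wq.c; struct contents are
     * placeholders. */
    enum { IO_WQ_WORK_UNBOUND = 4 };
    enum {
        IO_WQ_ACCT_BOUND,        /* bounded work: capped worker count */
        IO_WQ_ACCT_UNBOUND,      /* unbound work: may block indefinitely */
        IO_WQ_ACCT_NR,
    };

    struct io_wq_work { unsigned int flags; };
    struct io_wqe_acct { int nr_workers; };
    struct io_wqe { struct io_wqe_acct acct[IO_WQ_ACCT_NR]; };

    static struct io_wqe_acct *io_get_acct(struct io_wqe *wqe, bool bound)
    {
        return &wqe->acct[bound ? IO_WQ_ACCT_BOUND : IO_WQ_ACCT_UNBOUND];
    }

    /* The pattern from the listing: bounded unless UNBOUND is set. */
    static struct io_wqe_acct *io_work_get_acct(struct io_wqe *wqe,
                                                struct io_wq_work *work)
    {
        return io_get_acct(wqe, !(work->flags & IO_WQ_WORK_UNBOUND));
    }

    int main(void)
    {
        struct io_wqe wqe = { .acct = { { 2 }, { 0 } } };
        struct io_wq_work net = { .flags = IO_WQ_WORK_UNBOUND };

        printf("routed to unbound acct: %d\n",
               io_work_get_acct(&wqe, &net) == &wqe.acct[IO_WQ_ACCT_UNBOUND]);
        return 0;
    }
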
409 struct io_wq_work *work) in __io_worker_busy() argument
434 static inline unsigned int io_get_work_hash(struct io_wq_work *work) in io_get_work_hash() argument
436 return work->flags >> IO_WQ_HASH_SHIFT; in io_get_work_hash()
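io_get_work_hash() reveals the flags layout: the low bits hold the IO_WQ_WORK_* flags and the bits above IO_WQ_HASH_SHIFT (24 in io-wq.h) hold the per-work hash key, so one word carries both. A small self-contained model of the pack/unpack, with the shift and flag value mirroring io-wq.h:

    #include <assert.h>

    /* Model of the flags layout: low bits are IO_WQ_WORK_* flags, the
     * bits above IO_WQ_HASH_SHIFT carry the hash key. */
    #define IO_WQ_HASH_SHIFT 24
    enum { IO_WQ_WORK_HASHED = 2 };

    struct io_wq_work { unsigned int flags; };

    static void set_work_hash(struct io_wq_work *work, unsigned int bit)
    {
        /* Same packing as io_wq_hash_work() further down the listing. */
        work->flags |= IO_WQ_WORK_HASHED | (bit << IO_WQ_HASH_SHIFT);
    }

    static unsigned int io_get_work_hash(struct io_wq_work *work)
    {
        return work->flags >> IO_WQ_HASH_SHIFT;  /* upper bits = key */
    }

    int main(void)
    {
        struct io_wq_work w = { 0 };

        set_work_hash(&w, 17);
        assert(io_get_work_hash(&w) == 17);
        assert(w.flags & IO_WQ_WORK_HASHED);     /* low bits intact */
        return 0;
    }
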
462 struct io_wq_work *work, *tail; in io_get_next_work() local
469 work = container_of(node, struct io_wq_work, list); in io_get_next_work()
472 if (!io_wq_is_hashed(work)) { in io_get_next_work()
474 return work; in io_get_next_work()
477 hash = io_get_work_hash(work); in io_get_next_work()
485 return work; in io_get_next_work()
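io_get_next_work() scans the pending list: an unhashed entry is returned immediately, while a hashed entry is only eligible if no other worker currently owns its hash bucket; otherwise the scan skips past it, so two works with the same key never run concurrently. A simplified model of that scan (the real function also splices per-hash chains and records stalls; both are omitted here):

    #include <stdio.h>
    #include <stddef.h>

    /* Simplified model of io_get_next_work(): return the first runnable
     * entry; a hashed entry is runnable only if no worker currently owns
     * its hash bucket. */
    struct io_wq_work {
        struct io_wq_work *next;
        int hashed;
        unsigned int hash;
    };

    static struct io_wq_work *get_next_work(struct io_wq_work *head,
                                            unsigned long busy_hash)
    {
        for (struct io_wq_work *work = head; work; work = work->next) {
            if (!work->hashed)
                return work;              /* unhashed: any order is fine */
            if (!(busy_hash & (1UL << work->hash)))
                return work;              /* hash bucket currently idle */
            /* else another worker owns this hash; keep scanning */
        }
        return NULL;
    }

    int main(void)
    {
        struct io_wq_work b = { NULL, 0, 0 };
        struct io_wq_work a = { &b, 1, 3 };

        /* hash 3 already executing: the hashed entry is skipped */
        printf("picked hashed=%d\n", get_next_work(&a, 1UL << 3)->hashed);
        return 0;
    }
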
525 struct io_wq_work *work) in io_assign_current_work() argument
527 if (work) { in io_assign_current_work()
533 worker->cur_work = work; in io_assign_current_work()
537 static void io_wqe_enqueue(struct io_wqe *wqe, struct io_wq_work *work);
548 struct io_wq_work *work; in io_worker_handle_work() local
557 work = io_get_next_work(acct, worker); in io_worker_handle_work()
558 if (work) in io_worker_handle_work()
559 __io_worker_busy(wqe, worker, work); in io_worker_handle_work()
562 if (!work) in io_worker_handle_work()
564 io_assign_current_work(worker, work); in io_worker_handle_work()
570 unsigned int hash = io_get_work_hash(work); in io_worker_handle_work()
572 next_hashed = wq_next_work(work); in io_worker_handle_work()
574 if (unlikely(do_kill) && (work->flags & IO_WQ_WORK_UNBOUND)) in io_worker_handle_work()
575 work->flags |= IO_WQ_WORK_CANCEL; in io_worker_handle_work()
576 wq->do_work(work); in io_worker_handle_work()
579 linked = wq->free_work(work); in io_worker_handle_work()
580 work = next_hashed; in io_worker_handle_work()
581 if (!work && linked && !io_wq_is_hashed(linked)) { in io_worker_handle_work()
582 work = linked; in io_worker_handle_work()
585 io_assign_current_work(worker, work); in io_worker_handle_work()
599 if (!work) in io_worker_handle_work()
603 } while (work); in io_worker_handle_work()
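This cluster is the worker's main dispatch loop: take a work item, mark the worker busy, publish it as cur_work, then run wq->do_work(); wq->free_work() may hand back a linked continuation, and the worker prefers the next entry of the same hash chain (next_hashed) before falling back to that link. When the wq is being killed, unbound work gets IO_WQ_WORK_CANCEL set first, so do_work() completes it with an error instead of blocking. A condensed, runnable model of the loop (in the real code a hashed continuation is re-enqueued rather than run inline; locking and signal handling are omitted):

    #include <stdio.h>
    #include <stddef.h>

    /* Condensed model of io_worker_handle_work()'s inner loop.
     * next_hashed is left NULL here, standing in for wq_next_work(). */
    enum { IO_WQ_WORK_CANCEL = 1, IO_WQ_WORK_UNBOUND = 4 };

    struct io_wq_work {
        unsigned int flags;
        struct io_wq_work *linked;    /* continuation, returned by free_work */
    };

    static void do_work(struct io_wq_work *w)
    {
        printf("run work, cancel=%d\n", !!(w->flags & IO_WQ_WORK_CANCEL));
    }

    static struct io_wq_work *free_work(struct io_wq_work *w)
    {
        return w->linked;
    }

    static void handle_work(struct io_wq_work *work, int do_kill)
    {
        do {
            struct io_wq_work *next_hashed = NULL; /* wq_next_work() stand-in */
            struct io_wq_work *linked;

            if (do_kill && (work->flags & IO_WQ_WORK_UNBOUND))
                work->flags |= IO_WQ_WORK_CANCEL;  /* fail, don't block */
            do_work(work);
            linked = free_work(work);
            work = next_hashed;
            if (!work && linked)
                work = linked;        /* fall back to the linked work */
        } while (work);
    }

    int main(void)
    {
        struct io_wq_work second = { IO_WQ_WORK_UNBOUND, NULL };
        struct io_wq_work first = { 0, &second };

        handle_work(&first, 1);       /* the unbound link gets cancelled */
        return 0;
    }
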
723 static bool io_wq_work_match_all(struct io_wq_work *work, void *data) in io_wq_work_match_all() argument
785 schedule_work(&worker->work); in create_worker_cont()
788 static void io_workqueue_create(struct work_struct *work) in io_workqueue_create() argument
790 struct io_worker *worker = container_of(work, struct io_worker, work); in io_workqueue_create()
831 INIT_WORK(&worker->work, io_workqueue_create); in create_io_worker()
832 schedule_work(&worker->work); in create_io_worker()
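create_io_worker() and io_workqueue_create() use the standard kernel deferral pattern, and this is what the struct work_struct member on the first hit of this listing exists for: embed a work_struct in the object, INIT_WORK() it with a handler, schedule_work() it, and recover the enclosing object in the handler with container_of(). A user-space model of the pattern (the direct call stands in for the kernel's async workqueue):

    #include <stdio.h>
    #include <stddef.h>

    /* User-space model of the deferred-creation pattern: the callback
     * receives a pointer to the embedded member and recovers the
     * enclosing object with container_of(). */
    #define container_of(ptr, type, member) \
        ((type *)((char *)(ptr) - offsetof(type, member)))

    struct work_struct {
        void (*func)(struct work_struct *work);
    };

    struct io_worker {
        int index;
        struct work_struct work;      /* embedded, as in the listing */
    };

    static void io_workqueue_create(struct work_struct *work)
    {
        struct io_worker *worker = container_of(work, struct io_worker, work);

        printf("creating worker %d from deferred context\n", worker->index);
    }

    static void schedule_work(struct work_struct *work)
    {
        work->func(work);             /* real version queues to a kthread */
    }

    int main(void)
    {
        struct io_worker worker = { .index = 1 };

        worker.work.func = io_workqueue_create;   /* models INIT_WORK() */
        schedule_work(&worker.work);
        return 0;
    }
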
870 static void io_run_cancel(struct io_wq_work *work, struct io_wqe *wqe) in io_run_cancel() argument
875 work->flags |= IO_WQ_WORK_CANCEL; in io_run_cancel()
876 wq->do_work(work); in io_run_cancel()
877 work = wq->free_work(work); in io_run_cancel()
878 } while (work); in io_run_cancel()
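io_run_cancel() shows how cancellation avoids leaking a request chain: it sets IO_WQ_WORK_CANCEL but still calls do_work(), so the handler observes the flag and completes the request with an error, then advances through free_work()'s linked continuations the same way. A minimal model (the -ECANCELED completion is an assumption about what the handler does):

    #include <stdio.h>

    /* Model of io_run_cancel(): mark each link of the chain cancelled,
     * still run it so the handler can fail the request, then advance. */
    enum { IO_WQ_WORK_CANCEL = 1 };

    struct io_wq_work {
        unsigned int flags;
        struct io_wq_work *linked;
    };

    static void do_work(struct io_wq_work *w)
    {
        printf("%s\n", (w->flags & IO_WQ_WORK_CANCEL)
                       ? "complete with -ECANCELED" : "run normally");
    }

    static struct io_wq_work *free_work(struct io_wq_work *w)
    {
        return w->linked;
    }

    static void run_cancel(struct io_wq_work *work)
    {
        do {
            work->flags |= IO_WQ_WORK_CANCEL;
            do_work(work);            /* handler sees the flag, fails */
            work = free_work(work);   /* next linked work, or NULL */
        } while (work);
    }

    int main(void)
    {
        struct io_wq_work b = { 0, NULL }, a = { 0, &b };

        run_cancel(&a);               /* both links fail, none leak */
        return 0;
    }
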
881 static void io_wqe_insert_work(struct io_wqe *wqe, struct io_wq_work *work) in io_wqe_insert_work() argument
883 struct io_wqe_acct *acct = io_work_get_acct(wqe, work); in io_wqe_insert_work()
887 if (!io_wq_is_hashed(work)) { in io_wqe_insert_work()
889 wq_list_add_tail(&work->list, &acct->work_list); in io_wqe_insert_work()
893 hash = io_get_work_hash(work); in io_wqe_insert_work()
895 wqe->hash_tail[hash] = work; in io_wqe_insert_work()
899 wq_list_add_after(&work->list, &tail->list, &acct->work_list); in io_wqe_insert_work()
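io_wqe_insert_work() keeps per-key ordering cheap: unhashed work is appended to the tail of the accounting list, while hashed work is linked directly after the current tail of its hash chain, tracked in wqe->hash_tail[], so each chain stays contiguous and FIFO. A runnable model of that insert decision over a singly linked list (the bucket count mirrors io-wq.c's 64; treat the exact value as illustrative):

    #include <stdio.h>

    /* Model of io_wqe_insert_work(): unhashed entries append to the
     * list tail; a hashed entry splices in after the last entry of its
     * hash chain, tracked in hash_tail[]. */
    #define NR_BUCKETS 64

    struct work {
        struct work *next;
        int hashed;
        unsigned int hash;
    };

    struct queue {
        struct work *head, *tail;
        struct work *hash_tail[NR_BUCKETS];
    };

    static void list_add_tail(struct queue *q, struct work *w)
    {
        w->next = NULL;
        if (q->tail)
            q->tail->next = w;
        else
            q->head = w;
        q->tail = w;
    }

    static void insert_work(struct queue *q, struct work *w)
    {
        struct work *tail;

        if (!w->hashed) {
            list_add_tail(q, w);
            return;
        }
        tail = q->hash_tail[w->hash];
        q->hash_tail[w->hash] = w;    /* w is the chain's new tail */
        if (!tail) {
            list_add_tail(q, w);      /* first of this hash */
            return;
        }
        w->next = tail->next;         /* splice in after the old tail */
        tail->next = w;
        if (q->tail == tail)
            q->tail = w;
    }

    int main(void)
    {
        struct queue q = { 0 };
        struct work a = { .hashed = 1, .hash = 3 };
        struct work b = { .hashed = 0 };
        struct work c = { .hashed = 1, .hash = 3 };

        insert_work(&q, &a);
        insert_work(&q, &b);
        insert_work(&q, &c);          /* joins a's chain, ahead of b */
        for (struct work *w = q.head; w; w = w->next)
            printf("%c", w == &a ? 'a' : w == &b ? 'b' : 'c');
        printf("\n");                 /* prints "acb" */
        return 0;
    }

Keeping a chain contiguous means a scan that finds its first entry busy can skip to the chain's end in one step.
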
902 static bool io_wq_work_match_item(struct io_wq_work *work, void *data) in io_wq_work_match_item() argument
904 return work == data; in io_wq_work_match_item()
907 static void io_wqe_enqueue(struct io_wqe *wqe, struct io_wq_work *work) in io_wqe_enqueue() argument
909 struct io_wqe_acct *acct = io_work_get_acct(wqe, work); in io_wqe_enqueue()
910 unsigned work_flags = work->flags; in io_wqe_enqueue()
918 (work->flags & IO_WQ_WORK_CANCEL)) { in io_wqe_enqueue()
919 io_run_cancel(work, wqe); in io_wqe_enqueue()
924 io_wqe_insert_work(wqe, work); in io_wqe_enqueue()
946 .data = work, in io_wqe_enqueue()
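io_wqe_enqueue() gates queueing: if the wq is shutting down or the work already carries IO_WQ_WORK_CANCEL, it is run through the cancel path immediately instead of being inserted. The ".data = work" hit belongs to a later fallback in the same function, which, if worker creation fails, cancels exactly this item via io_wq_work_match_item(). A model of the gate (insertion and worker wakeup are stubbed out):

    #include <stdio.h>
    #include <stdbool.h>

    /* Model of the io_wqe_enqueue() gate: a dying wq, or work already
     * flagged for cancel, is failed immediately instead of queued. */
    enum { IO_WQ_WORK_CANCEL = 1 };

    struct io_wq_work { unsigned int flags; };

    static void io_run_cancel(struct io_wq_work *work)
    {
        printf("completed with error, flags=%u\n", work->flags);
    }

    static void enqueue(struct io_wq_work *work, bool wq_exiting)
    {
        if (wq_exiting || (work->flags & IO_WQ_WORK_CANCEL)) {
            work->flags |= IO_WQ_WORK_CANCEL;
            io_run_cancel(work);
            return;
        }
        printf("queued\n");           /* io_wqe_insert_work() + wakeup */
    }

    int main(void)
    {
        struct io_wq_work w = { 0 };

        enqueue(&w, false);           /* queued */
        enqueue(&w, true);            /* cancelled instead */
        return 0;
    }
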
957 void io_wq_enqueue(struct io_wq *wq, struct io_wq_work *work) in io_wq_enqueue() argument
961 io_wqe_enqueue(wqe, work); in io_wq_enqueue()
968 void io_wq_hash_work(struct io_wq_work *work, void *val) in io_wq_hash_work() argument
973 work->flags |= (IO_WQ_WORK_HASHED | (bit << IO_WQ_HASH_SHIFT)); in io_wq_hash_work()
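io_wq_hash_work() is the public entry to all of the hashing above: the caller passes an address whose identity defines the serialization domain, it is hashed down to a small bucket index (io-wq.c uses hash_ptr() with a hash order of 6, i.e. 64 buckets), and the index is stored above IO_WQ_HASH_SHIFT. io_uring uses this, for example, to serialize buffered writes to the same regular file by hashing on the inode. A user-space model, where hash_ptr() is a stand-in rather than the kernel's hash.h helper:

    #include <stdio.h>
    #include <stdint.h>

    /* Model of what io_wq_hash_work() does with its 'val' cookie:
     * reduce a pointer to a small bucket index so every work queued
     * with the same cookie lands in one serialized chain. */
    #define IO_WQ_HASH_ORDER 6        /* 64 buckets */
    #define IO_WQ_HASH_SHIFT 24
    enum { IO_WQ_WORK_HASHED = 2 };

    struct io_wq_work { unsigned int flags; };

    static unsigned int hash_ptr(const void *p, unsigned int bits)
    {
        uint64_t v = (uint64_t)(uintptr_t)p;

        return (unsigned int)((v * 0x9E3779B97F4A7C15ull) >> (64 - bits));
    }

    static void io_wq_hash_work(struct io_wq_work *work, void *val)
    {
        unsigned int bit = hash_ptr(val, IO_WQ_HASH_ORDER);

        work->flags |= IO_WQ_WORK_HASHED | (bit << IO_WQ_HASH_SHIFT);
    }

    int main(void)
    {
        struct io_wq_work w1 = { 0 }, w2 = { 0 };
        int inode;                    /* same "inode" => same bucket */

        io_wq_hash_work(&w1, &inode);
        io_wq_hash_work(&w2, &inode);
        printf("same bucket: %d\n",
               (w1.flags >> IO_WQ_HASH_SHIFT) == (w2.flags >> IO_WQ_HASH_SHIFT));
        return 0;
    }

Collisions between two different cookies are harmless here: they cost extra serialization, never lost ordering.
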
996 struct io_wq_work *work, in io_wqe_remove_pending() argument
999 struct io_wqe_acct *acct = io_work_get_acct(wqe, work); in io_wqe_remove_pending()
1000 unsigned int hash = io_get_work_hash(work); in io_wqe_remove_pending()
1003 if (io_wq_is_hashed(work) && work == wqe->hash_tail[hash]) { in io_wqe_remove_pending()
1011 wq_list_del(&acct->work_list, &work->list, prev); in io_wqe_remove_pending()
1020 struct io_wq_work *work; in io_acct_cancel_pending_work() local
1023 work = container_of(node, struct io_wq_work, list); in io_acct_cancel_pending_work()
1024 if (!match->fn(work, match->data)) in io_acct_cancel_pending_work()
1026 io_wqe_remove_pending(wqe, work, prev); in io_acct_cancel_pending_work()
1028 io_run_cancel(work, wqe); in io_acct_cancel_pending_work()
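The final clusters connect the match callbacks to the cancellation scan: io_acct_cancel_pending_work() walks the pending list, asks the caller's predicate (io_wq_work_match_all() to drain everything, io_wq_work_match_item() for one request) whether each entry matches, unlinks hits via io_wqe_remove_pending() (which also repairs hash_tail[] when the removed entry was a chain tail), and feeds them to io_run_cancel(). A runnable model of the scan with a function-pointer predicate; the hash_tail[] repair is omitted:

    #include <stdio.h>
    #include <stdbool.h>

    /* Model of the pending-work cancellation scan: walk the list with a
     * caller-supplied predicate, unlink matches, cancel them. The two
     * predicates mirror io_wq_work_match_all()/io_wq_work_match_item(). */
    struct io_wq_work {
        struct io_wq_work *next;
        int id;
    };

    typedef bool (*work_match_fn)(struct io_wq_work *work, void *data);

    static bool match_all(struct io_wq_work *work, void *data)
    {
        (void)work; (void)data;
        return true;                  /* e.g. when tearing the wq down */
    }

    static bool match_item(struct io_wq_work *work, void *data)
    {
        return work == data;          /* cancel one specific request */
    }

    static int cancel_pending(struct io_wq_work **head, work_match_fn fn,
                              void *data, bool cancel_all)
    {
        int found = 0;
        struct io_wq_work **pp = head;

        while (*pp) {
            struct io_wq_work *work = *pp;

            if (!fn(work, data)) {
                pp = &work->next;
                continue;
            }
            *pp = work->next;         /* io_wqe_remove_pending() */
            printf("cancel work %d\n", work->id);
            found++;
            if (!cancel_all)
                break;
        }
        return found;
    }

    int main(void)
    {
        struct io_wq_work c = { NULL, 3 }, b = { &c, 2 }, a = { &b, 1 };
        struct io_wq_work *head = &a;

        cancel_pending(&head, match_item, &b, false); /* cancels only 2 */
        cancel_pending(&head, match_all, NULL, true); /* cancels 1, 3 */
        return 0;
    }
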