/linux/include/linux/ |
A D | blkdev.h | 218  unsigned long queue_flags;  member
                 418  #define blk_queue_dying(q) test_bit(QUEUE_FLAG_DYING, &(q)->queue_flags)
                 419  #define blk_queue_dead(q) test_bit(QUEUE_FLAG_DEAD, &(q)->queue_flags)
                 423  test_bit(QUEUE_FLAG_NOXMERGES, &(q)->queue_flags)
                 426  test_bit(QUEUE_FLAG_STABLE_WRITES, &(q)->queue_flags)
                 431  test_bit(QUEUE_FLAG_ZONE_RESETALL, &(q)->queue_flags)
                 433  (test_bit(QUEUE_FLAG_SECERASE, &(q)->queue_flags))
                 434  #define blk_queue_dax(q) test_bit(QUEUE_FLAG_DAX, &(q)->queue_flags)
                 436  test_bit(QUEUE_FLAG_PCI_P2PDMA, &(q)->queue_flags)
                 439  test_bit(QUEUE_FLAG_RQ_ALLOC_TIME, &(q)->queue_flags)
                 [all …]
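The blkdev.h hits are the accessor macros: each blk_queue_*() helper reduces to test_bit() on the queue_flags word declared at line 218. A minimal sketch of a caller, assuming a made-up helper name (only the macros themselves are from the tree):

	#include <linux/blkdev.h>

	/* Hypothetical helper; the blk_queue_*() macros are the real API. */
	static bool example_queue_accepts_io(struct request_queue *q)
	{
		/* blk_queue_dying()/blk_queue_dead() test QUEUE_FLAG_DYING/DEAD,
		 * exactly as the #defines above show. */
		return !blk_queue_dying(q) && !blk_queue_dead(q);
	}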
|
A D | blk-mq.h | 828 test_bit(QUEUE_FLAG_FAIL_IO, &q->queue_flags)) in blk_should_fake_timeout()
|
/linux/block/ |
A D | blk-sysfs.c | 282  bit = test_bit(QUEUE_FLAG_##flag, &q->queue_flags); \
                    363  bool set = test_bit(QUEUE_FLAG_SAME_COMP, &q->queue_flags);  in queue_rq_affinity_show()
                    364  bool force = test_bit(QUEUE_FLAG_SAME_FORCE, &q->queue_flags);  in queue_rq_affinity_show()
                    430  return queue_var_show(test_bit(QUEUE_FLAG_POLL, &q->queue_flags), page);  in queue_poll_show()
                    436  if (!test_bit(QUEUE_FLAG_POLL, &q->queue_flags))  in queue_poll_store()
                    517  if (test_bit(QUEUE_FLAG_WC, &q->queue_flags))  in queue_wc_show()
                    547  return sprintf(page, "%u\n", test_bit(QUEUE_FLAG_FUA, &q->queue_flags));  in queue_fua_show()
                    788  if (test_bit(QUEUE_FLAG_POLL_STATS, &q->queue_flags))  in blk_release_queue()
|
A D | blk-core.c | 83  set_bit(flag, &q->queue_flags);  in blk_queue_flag_set()
                   94  clear_bit(flag, &q->queue_flags);  in blk_queue_flag_clear()
                   108  return test_and_set_bit(flag, &q->queue_flags);  in blk_queue_flag_test_and_set()
                   757  !test_bit(QUEUE_FLAG_WC, &q->queue_flags)) {  in submit_bio_checks()
                   765  if (!test_bit(QUEUE_FLAG_POLL, &q->queue_flags))  in submit_bio_checks()
                   1036  !test_bit(QUEUE_FLAG_POLL, &q->queue_flags))  in bio_poll()
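The first three blk-core.c hits are the exported wrappers (set_bit/clear_bit/test_and_set_bit on queue_flags) that drivers use instead of poking the word directly. A minimal sketch of a caller, assuming a hypothetical driver init hook; QUEUE_FLAG_NONROT and QUEUE_FLAG_ADD_RANDOM are just example bits:

	#include <linux/blkdev.h>

	/* Hypothetical init hook; only the blk_queue_flag_*() calls are real. */
	static void example_driver_init_queue(struct request_queue *q)
	{
		/* atomic set_bit() on q->queue_flags, per blk-core.c:83 */
		blk_queue_flag_set(QUEUE_FLAG_NONROT, q);

		/* matching clear_bit() wrapper, per blk-core.c:94 */
		blk_queue_flag_clear(QUEUE_FLAG_ADD_RANDOM, q);
	}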
|
A D | blk-mq-tag.c | 30  if (!test_bit(QUEUE_FLAG_HCTX_ACTIVE, &q->queue_flags) &&  in __blk_mq_tag_busy()
                     31  !test_and_set_bit(QUEUE_FLAG_HCTX_ACTIVE, &q->queue_flags))  in __blk_mq_tag_busy()
                     64  &q->queue_flags))  in __blk_mq_tag_idle()
|
A D | blk-timeout.c | 43 int set = test_bit(QUEUE_FLAG_FAIL_IO, &disk->queue->queue_flags); in part_timeout_show()
|
A D | blk-mq.h | 361 if (!test_bit(QUEUE_FLAG_HCTX_ACTIVE, &q->queue_flags)) in hctx_may_queue()
|
A D | blk-flush.c | 385 unsigned long fflags = q->queue_flags; /* may change, cache */ in blk_insert_flush()
|
A D | blk-mq.c | 916  !test_bit(QUEUE_FLAG_SAME_COMP, &rq->q->queue_flags))  in blk_mq_complete_need_ipi()
                 929  (!test_bit(QUEUE_FLAG_SAME_FORCE, &rq->q->queue_flags) &&  in blk_mq_complete_need_ipi()
                 1033  if (test_bit(QUEUE_FLAG_STATS, &q->queue_flags)) {  in blk_mq_start_request()
                 3740  q->queue_flags |= QUEUE_FLAG_MQ_DEFAULT;  in blk_mq_init_allocated_queue()
                 4249  if (test_bit(QUEUE_FLAG_POLL_STATS, &q->queue_flags) ||  in blk_poll_stats_enable()
                 4262  if (!test_bit(QUEUE_FLAG_POLL_STATS, &q->queue_flags) ||  in blk_mq_poll_stats_start()
|
A D | blk-settings.c | 803 wbt_set_write_cache(q, test_bit(QUEUE_FLAG_WC, &q->queue_flags)); in blk_queue_write_cache()
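blk_queue_write_cache() is where QUEUE_FLAG_WC and QUEUE_FLAG_FUA get set from driver-reported capabilities; line 803 then feeds the resulting WC bit into wbt_set_write_cache(). A sketch of the driver side, assuming a made-up probe helper (blk_queue_write_cache() itself is the real call):

	#include <linux/blkdev.h>

	/* Hypothetical probe helper; wc=true sets QUEUE_FLAG_WC and
	 * fua=true sets QUEUE_FLAG_FUA on the queue. */
	static void example_advertise_cache(struct request_queue *q,
					    bool volatile_cache, bool supports_fua)
	{
		blk_queue_write_cache(q, volatile_cache, supports_fua);
	}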
|
A D | blk-wbt.c | 855 wbt_set_write_cache(q, test_bit(QUEUE_FLAG_WC, &q->queue_flags)); in wbt_init()
|
A D | blk-mq-debugfs.c | 140 blk_flags_show(m, q->queue_flags, blk_queue_flag_name, in queue_state_show()
|
/linux/fs/xfs/ |
A D | xfs_bio_io.c | 34 if (!test_bit(QUEUE_FLAG_WC, &q->queue_flags)) { in xfs_flush_bdev_async()
|
/linux/drivers/target/ |
A D | target_core_iblock.c | 741  if (test_bit(QUEUE_FLAG_FUA, &q->queue_flags)) {  in iblock_execute_rw()
                              744  else if (!test_bit(QUEUE_FLAG_WC, &q->queue_flags))  in iblock_execute_rw()
                              897  return test_bit(QUEUE_FLAG_WC, &q->queue_flags);  in iblock_get_write_cache()
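The iblock hits show the common consumer-side decision: use REQ_FUA only when the queue advertises QUEUE_FLAG_FUA, and treat a queue without QUEUE_FLAG_WC as needing no cache handling at all. A sketch of that decision with a made-up helper name:

	#include <linux/blkdev.h>

	/* Hypothetical helper mirroring the pattern in iblock_execute_rw(). */
	static unsigned int example_write_opf(struct request_queue *q, bool want_fua)
	{
		unsigned int opf = REQ_OP_WRITE;

		/* QUEUE_FLAG_FUA: the device honours forced-unit-access writes */
		if (want_fua && test_bit(QUEUE_FLAG_FUA, &q->queue_flags))
			opf |= REQ_FUA;
		/*
		 * With QUEUE_FLAG_WC clear there is no volatile cache, so plain
		 * writes are already durable and neither REQ_FUA nor a later
		 * flush is required.
		 */
		return opf;
	}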
|
/linux/kernel/sched/ |
A D | core.c | 7202  int queue_flags = DEQUEUE_SAVE | DEQUEUE_MOVE | DEQUEUE_NOCLOCK;  in __sched_setscheduler()  local
               7406  queue_flags &= ~DEQUEUE_MOVE;  in __sched_setscheduler()
               7412  dequeue_task(rq, p, queue_flags);  in __sched_setscheduler()
               7430  queue_flags |= ENQUEUE_HEAD;  in __sched_setscheduler()
               7432  enqueue_task(rq, p, queue_flags);  in __sched_setscheduler()
               9843  int queued, running, queue_flags =  in sched_move_task()  local
               9855  dequeue_task(rq, tsk, queue_flags);  in sched_move_task()
               9862  enqueue_task(rq, tsk, queue_flags);  in sched_move_task()
|
/linux/drivers/block/rnbd/ |
A D | rnbd-srv.c | 573 if (test_bit(QUEUE_FLAG_WC, &q->queue_flags)) in rnbd_srv_fill_msg_open_rsp()
|
/linux/drivers/block/xen-blkback/ |
A D | xenbus.c | 519 if (q && test_bit(QUEUE_FLAG_WC, &q->queue_flags)) in xen_vbd_create()
|
/linux/drivers/md/ |
A D | raid5-ppl.c | 1326 if (test_bit(QUEUE_FLAG_WC, &q->queue_flags)) in ppl_init_child_log()
|
A D | dm-table.c | 1763 return (q->queue_flags & flush); in device_flush_capable()
|
A D | raid5-cache.c | 3099 log->need_cache_flush = test_bit(QUEUE_FLAG_WC, &q->queue_flags) != 0; in r5l_init_log()
|
/linux/drivers/block/ |
A D | loop.c | 1118 if (test_bit(QUEUE_FLAG_WC, &lo->lo_queue->queue_flags)) in __loop_clr_fd()
|
/linux/fs/btrfs/ |
A D | disk-io.c | 4003 if (!test_bit(QUEUE_FLAG_WC, &q->queue_flags)) in write_dev_flush()
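The filesystem and driver hits above (xfs, md, dm, loop, rnbd, xen-blkback, btrfs) all follow the same pattern: issue a cache flush only when QUEUE_FLAG_WC says the device has a volatile write cache. A minimal sketch of that pattern, assuming a hypothetical helper around blkdev_issue_flush():

	#include <linux/blkdev.h>

	/* Hypothetical helper; bdev_get_queue() and blkdev_issue_flush() are
	 * the real calls. */
	static void example_flush_if_needed(struct block_device *bdev)
	{
		struct request_queue *q = bdev_get_queue(bdev);

		/* No QUEUE_FLAG_WC means there is no volatile cache to flush. */
		if (!test_bit(QUEUE_FLAG_WC, &q->queue_flags))
			return;

		/* Submits an empty preflush bio and waits for completion. */
		blkdev_issue_flush(bdev);
	}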
|