Lines Matching refs:bdev_get_queue
905 static inline struct request_queue *bdev_get_queue(struct block_device *bdev) in bdev_get_queue() function
1183 return &bdev_get_queue(bdev)->limits; in bdev_limits()
1233 return queue_emulates_zone_append(bdev_get_queue(bdev)); in bdev_emulates_zone_append()
1244 return queue_max_segments(bdev_get_queue(bdev)); in bdev_max_segments()
1254 return queue_logical_block_size(bdev_get_queue(bdev)); in bdev_logical_block_size()
1264 return queue_physical_block_size(bdev_get_queue(bdev)); in bdev_physical_block_size()
1274 return queue_io_min(bdev_get_queue(bdev)); in bdev_io_min()
1284 return queue_io_opt(bdev_get_queue(bdev)); in bdev_io_opt()
1296 return queue_zone_write_granularity(bdev_get_queue(bdev)); in bdev_zone_write_granularity()
1325 return blk_queue_nonrot(bdev_get_queue(bdev)); in bdev_nonrot()
1335 struct request_queue *q = bdev_get_queue(bdev); in bdev_stable_writes()
1351 return blk_queue_write_cache(bdev_get_queue(bdev)); in bdev_write_cache()
1366 return blk_queue_is_zoned(bdev_get_queue(bdev)); in bdev_is_zoned()
1376 struct request_queue *q = bdev_get_queue(bdev); in bdev_zone_sectors()
1462 return queue_dma_alignment(bdev_get_queue(bdev)); in bdev_dma_alignment()
1737 return queue_atomic_write_unit_min_bytes(bdev_get_queue(bdev)); in bdev_atomic_write_unit_min_bytes()
1745 return queue_atomic_write_unit_max_bytes(bdev_get_queue(bdev)); in bdev_atomic_write_unit_max_bytes()
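Every hit after the definition follows the same pattern: a bdev_*() helper resolves the block_device to its request_queue via bdev_get_queue() and forwards to the matching queue_*() or blk_queue_*() accessor. Below is a caller-side sketch that combines a few of the helpers from the hits above; the reporting function itself is illustrative and not kernel source, but every helper it calls appears in the listing.

/*
 * Illustrative sketch, not kernel source: uses several of the bdev_*()
 * helpers listed above. Each one internally calls bdev_get_queue() and
 * delegates to the corresponding queue-level accessor.
 */
#include <linux/blkdev.h>
#include <linux/printk.h>

static void report_bdev_properties(struct block_device *bdev)
{
	/* Wrappers around queue_logical_block_size() and queue_physical_block_size(). */
	pr_info("logical block size:  %u\n",
		(unsigned int)bdev_logical_block_size(bdev));
	pr_info("physical block size: %u\n",
		(unsigned int)bdev_physical_block_size(bdev));

	/* bdev_nonrot() wraps blk_queue_nonrot(): true for non-rotational media. */
	pr_info("non-rotational: %d\n", bdev_nonrot(bdev));

	/*
	 * bdev_is_zoned() wraps blk_queue_is_zoned(); the zone size is only
	 * meaningful for zoned devices.
	 */
	if (bdev_is_zoned(bdev))
		pr_info("zone sectors: %llu\n",
			(unsigned long long)bdev_zone_sectors(bdev));
}

These wrappers exist so that filesystem and driver code can query device limits and features from a block_device handle without dereferencing struct request_queue directly; bdev_get_queue() is the single point where that translation happens.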