Lines Matching full:req

io_is_timeout_noseq() (req is an argument):
   38  static inline bool io_is_timeout_noseq(struct io_kiocb *req)
   40      struct io_timeout *timeout = io_kiocb_to_cmd(req, struct io_timeout);
   41      struct io_timeout_data *data = req->async_data;
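
This listing appears to come from the Linux kernel's io_uring timeout code (io_uring/timeout.c), filtered to lines containing req, so statements that do not mention req are elided. For this helper that means the return statement is missing; a sketch of how the full function plausibly reads, with the final condition taken from recent upstream sources and therefore an assumption rather than part of the listing:

    static inline bool io_is_timeout_noseq(struct io_kiocb *req)
    {
            struct io_timeout *timeout = io_kiocb_to_cmd(req, struct io_timeout);
            struct io_timeout_data *data = req->async_data;

            /*
             * Assumed completion: a timeout has no sequence when it carries
             * no offset, or (in newer kernels) when it is multishot.
             */
            return !timeout->off || data->flags & IORING_TIMEOUT_MULTISHOT;
    }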

io_put_req() (req is an argument):
   46  static inline void io_put_req(struct io_kiocb *req)
   48      if (req_ref_put_and_test(req)) {
   49          io_queue_next(req);
   50          io_free_req(req);
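
Every statement in this helper mentions req, so apart from the braces the listing already shows the whole body. Reassembled for readability (a sketch, not a verbatim quote of the file):

    static inline void io_put_req(struct io_kiocb *req)
    {
            /*
             * Drop a reference; if it was the last one, schedule the next
             * linked request and free this one.
             */
            if (req_ref_put_and_test(req)) {
                    io_queue_next(req);
                    io_free_req(req);
            }
    }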

io_timeout_complete() (req is an argument):
   68  static void io_timeout_complete(struct io_kiocb *req, struct io_tw_state *ts)
   70      struct io_timeout *timeout = io_kiocb_to_cmd(req, struct io_timeout);
   71      struct io_timeout_data *data = req->async_data;
   72      struct io_ring_ctx *ctx = req->ctx;
   75      if (io_req_post_cqe(req, -ETIME, IORING_CQE_F_MORE)) {
   85      io_req_task_complete(req, ts);

io_flush_killed_timeouts() (req is a local variable):
   95      struct io_kiocb *req;
   99          req = cmd_to_io_kiocb(timeout);
  101              req_set_fail(req);
  102          io_req_queue_tw_complete(req, err);

io_kill_timeout() (req is an argument):
  108  static void io_kill_timeout(struct io_kiocb *req, struct list_head *list)
  109      __must_hold(&req->ctx->timeout_lock)
  111      struct io_timeout_data *io = req->async_data;
  114          struct io_timeout *timeout = io_kiocb_to_cmd(req, struct io_timeout);
  116          atomic_set(&req->ctx->cq_timeouts,
  117              atomic_read(&req->ctx->cq_timeouts) + 1);

io_flush_timeouts() (req is a local variable):
  132          struct io_kiocb *req = cmd_to_io_kiocb(timeout);
  135          if (io_is_timeout_noseq(req))
  150          io_kill_timeout(req, &list);

io_fail_links() (req is an argument):
  173  static void io_fail_links(struct io_kiocb *req)
  174      __must_hold(&req->ctx->completion_lock)
  176      struct io_kiocb *link = req->link;
  177      bool ignore_cqes = req->flags & REQ_F_SKIP_LINK_CQES;
  187          trace_io_uring_fail_link(req, link);
  191          link = req->link;
  194      req->link = NULL;

io_remove_next_linked() (req is an argument):
  197  static inline void io_remove_next_linked(struct io_kiocb *req)
  199      struct io_kiocb *nxt = req->link;
  201      req->link = nxt->link;
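
The one elided line in io_remove_next_linked() does not mention req: after splicing the next request out of the chain, the function clears that request's own link pointer. A sketch of the full helper, with the last statement assumed from upstream:

    static inline void io_remove_next_linked(struct io_kiocb *req)
    {
            struct io_kiocb *nxt = req->link;

            /* Splice nxt out: req now links past it ... */
            req->link = nxt->link;
            /* ... and nxt no longer points into the chain (assumed line). */
            nxt->link = NULL;
    }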

io_disarm_next() (req is an argument):
  205  void io_disarm_next(struct io_kiocb *req)
  206      __must_hold(&req->ctx->completion_lock)
  210      if (req->flags & REQ_F_ARM_LTIMEOUT) {
  211          link = req->link;
  212          req->flags &= ~REQ_F_ARM_LTIMEOUT;
  214              io_remove_next_linked(req);
  217      } else if (req->flags & REQ_F_LINK_TIMEOUT) {
  218          struct io_ring_ctx *ctx = req->ctx;
  221          link = io_disarm_linked_timeout(req);
  226      if (unlikely((req->flags & REQ_F_FAIL) &&
  227                   !(req->flags & REQ_F_HARDLINK)))
  228          io_fail_links(req);

__io_disarm_linked_timeout() (req is an argument):
  231  struct io_kiocb *__io_disarm_linked_timeout(struct io_kiocb *req,
  233      __must_hold(&req->ctx->completion_lock)
  234      __must_hold(&req->ctx->timeout_lock)
  239      io_remove_next_linked(req);

io_timeout_fn() (req is a local variable):
  253      struct io_kiocb *req = data->req;
  254      struct io_timeout *timeout = io_kiocb_to_cmd(req, struct io_timeout);
  255      struct io_ring_ctx *ctx = req->ctx;
  260      atomic_set(&req->ctx->cq_timeouts,
  261          atomic_read(&req->ctx->cq_timeouts) + 1);
  265          req_set_fail(req);
  267      io_req_set_res(req, -ETIME, 0);
  268      req->io_task_work.func = io_timeout_complete;
  269      io_req_task_work_add(req);
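
io_timeout_fn() is the hrtimer callback for a plain timeout; the lines that take ctx->timeout_lock, drop the timeout from its list and return HRTIMER_NORESTART do not mention req and so are missing above. A hedged reconstruction of the whole callback based on recent upstream io_uring code; the locking, the list handling and the ETIME_SUCCESS check are assumptions:

    static enum hrtimer_restart io_timeout_fn(struct hrtimer *timer)
    {
            struct io_timeout_data *data = container_of(timer,
                                                    struct io_timeout_data, timer);
            struct io_kiocb *req = data->req;
            struct io_timeout *timeout = io_kiocb_to_cmd(req, struct io_timeout);
            struct io_ring_ctx *ctx = req->ctx;
            unsigned long flags;

            /* Assumed: detach from the timeout list under timeout_lock. */
            spin_lock_irqsave(&ctx->timeout_lock, flags);
            list_del_init(&timeout->list);
            atomic_set(&req->ctx->cq_timeouts,
                       atomic_read(&req->ctx->cq_timeouts) + 1);
            spin_unlock_irqrestore(&ctx->timeout_lock, flags);

            /* A firing timeout is a failure unless ETIME counts as success. */
            if (!(data->flags & IORING_TIMEOUT_ETIME_SUCCESS))
                    req_set_fail(req);

            /* Complete the request with -ETIME from task work context. */
            io_req_set_res(req, -ETIME, 0);
            req->io_task_work.func = io_timeout_complete;
            io_req_task_work_add(req);
            return HRTIMER_NORESTART;
    }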

io_timeout_extract() (req is a local variable):
  279      struct io_kiocb *req = NULL;
  285              req = tmp;
  289      if (!req)
  292      io = req->async_data;
  295      timeout = io_kiocb_to_cmd(req, struct io_timeout);
  297      return req;

io_timeout_cancel() (req is a local variable):
  303      struct io_kiocb *req;
  306      req = io_timeout_extract(ctx, cd);
  309      if (IS_ERR(req))
  310          return PTR_ERR(req);
  311      io_req_task_queue_fail(req, -ECANCELED);
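
Around io_timeout_extract() the elided lines are the timeout_lock acquisition and release plus the final return 0. A sketch of how io_timeout_cancel() plausibly reads upstream; the lock primitives and annotations are assumptions:

    int io_timeout_cancel(struct io_ring_ctx *ctx, struct io_cancel_data *cd)
            __must_hold(&ctx->completion_lock)
    {
            struct io_kiocb *req;

            /* Assumed: the matching timeout is extracted under timeout_lock. */
            spin_lock_irq(&ctx->timeout_lock);
            req = io_timeout_extract(ctx, cd);
            spin_unlock_irq(&ctx->timeout_lock);

            if (IS_ERR(req))
                    return PTR_ERR(req);
            /* A cancelled timeout completes with -ECANCELED via task work. */
            io_req_task_queue_fail(req, -ECANCELED);
            return 0;
    }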

io_req_task_link_timeout() (req is an argument):
  315  static void io_req_task_link_timeout(struct io_kiocb *req, struct io_tw_state *ts)
  317      struct io_timeout *timeout = io_kiocb_to_cmd(req, struct io_timeout);
  324              .ctx = req->ctx,
  328          ret = io_try_cancel(req->tctx, &cd, 0);
  332          io_req_set_res(req, ret ?: -ETIME, 0);
  333          io_req_task_complete(req, ts);
  336          io_req_set_res(req, -ETIME, 0);
  337          io_req_task_complete(req, ts);

io_link_timeout_fn() (req is a local variable):
  345      struct io_kiocb *prev, *req = data->req;
  346      struct io_timeout *timeout = io_kiocb_to_cmd(req, struct io_timeout);
  347      struct io_ring_ctx *ctx = req->ctx;
  367      req->io_task_work.func = io_req_task_link_timeout;
  368      io_req_task_work_add(req);

io_linked_timeout_update() (req is a local variable):
  394      struct io_kiocb *req = NULL;
  400              req = tmp;
  404      if (!req)
  407      io = req->async_data;

io_timeout_update() (req is a local variable):
  421      struct io_kiocb *req = io_timeout_extract(ctx, &cd);
  422      struct io_timeout *timeout = io_kiocb_to_cmd(req, struct io_timeout);
  425      if (IS_ERR(req))
  426          return PTR_ERR(req);
  429      data = req->async_data;

io_timeout_remove_prep() (req is an argument):
  439  int io_timeout_remove_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe)
  441      struct io_timeout_rem *tr = io_kiocb_to_cmd(req, struct io_timeout_rem);
  443      if (unlikely(req->flags & (REQ_F_FIXED_FILE | REQ_F_BUFFER_SELECT)))

io_timeout_remove() (req is an argument):
  479  int io_timeout_remove(struct io_kiocb *req, unsigned int issue_flags)
  481      struct io_timeout_rem *tr = io_kiocb_to_cmd(req, struct io_timeout_rem);
  482      struct io_ring_ctx *ctx = req->ctx;
  503          req_set_fail(req);
  504      io_req_set_res(req, ret, 0);

__io_timeout_prep() (req is an argument):
  508  static int __io_timeout_prep(struct io_kiocb *req,
  512      struct io_timeout *timeout = io_kiocb_to_cmd(req, struct io_timeout);
  535      if (unlikely(off && !req->ctx->off_timeout_used))
  536          req->ctx->off_timeout_used = true;
  545      if (WARN_ON_ONCE(req_has_async_data(req)))
  547      data = io_uring_alloc_async_data(NULL, req);
  550      data->req = req;
  563          struct io_submit_link *link = &req->ctx->submit_state.link;

io_timeout_prep() (req is an argument):
  575  int io_timeout_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe)
  577      return __io_timeout_prep(req, sqe, false);

io_link_timeout_prep() (req is an argument):
  580  int io_link_timeout_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe)
  582      return __io_timeout_prep(req, sqe, true);
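
Both prep entry points are single-statement wrappers, so the listing effectively shows them in full; reassembled, they differ only in the boolean link-timeout flag they pass to __io_timeout_prep():

    int io_timeout_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe)
    {
            /* Plain IORING_OP_TIMEOUT. */
            return __io_timeout_prep(req, sqe, false);
    }

    int io_link_timeout_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe)
    {
            /* IORING_OP_LINK_TIMEOUT, armed against the preceding linked request. */
            return __io_timeout_prep(req, sqe, true);
    }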

io_timeout() (req is an argument):
  585  int io_timeout(struct io_kiocb *req, unsigned int issue_flags)
  587      struct io_timeout *timeout = io_kiocb_to_cmd(req, struct io_timeout);
  588      struct io_ring_ctx *ctx = req->ctx;
  589      struct io_timeout_data *data = req->async_data;
  600      if (io_is_timeout_noseq(req)) {

io_queue_linked_timeout() (req is an argument):
  636  void io_queue_linked_timeout(struct io_kiocb *req)
  638      struct io_timeout *timeout = io_kiocb_to_cmd(req, struct io_timeout);
  639      struct io_ring_ctx *ctx = req->ctx;
  647          struct io_timeout_data *data = req->async_data;
  656      io_put_req(req);

io_match_task() (req is a local variable):
  663      struct io_kiocb *req;
  670      io_for_each_link(req, head) {
  671          if (req->flags & REQ_F_INFLIGHT)

io_kill_timeouts() (req is a local variable):
  691          struct io_kiocb *req = cmd_to_io_kiocb(timeout);
  693          if (io_match_task(req, tctx, cancel_all))
  694              io_kill_timeout(req, &list);
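
io_kill_timeouts() ties several of the fragments above together: it walks the ring's timeout list, uses io_match_task() to pick the requests that belong to the task being cancelled, moves them onto a private list with io_kill_timeout(), and then flushes that list with -ECANCELED (the io_flush_killed_timeouts() lines near 95 above). A hedged sketch based on recent upstream sources; the locking and the exact return convention are assumptions:

    __cold bool io_kill_timeouts(struct io_ring_ctx *ctx, struct io_uring_task *tctx,
                                 bool cancel_all)
    {
            struct io_timeout *timeout, *tmp;
            LIST_HEAD(list);

            /* Assumed: both locks are held while walking and detaching timeouts. */
            spin_lock(&ctx->completion_lock);
            spin_lock_irq(&ctx->timeout_lock);
            list_for_each_entry_safe(timeout, tmp, &ctx->timeout_list, list) {
                    struct io_kiocb *req = cmd_to_io_kiocb(timeout);

                    if (io_match_task(req, tctx, cancel_all))
                            io_kill_timeout(req, &list);
            }
            spin_unlock_irq(&ctx->timeout_lock);
            spin_unlock(&ctx->completion_lock);

            /*
             * Complete everything that was detached and report whether any
             * timeout was actually cancelled.
             */
            return io_flush_killed_timeouts(&list, -ECANCELED);
    }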