Jens Axboe | 7357298 | 2022-06-13 07:12:45 -0600 | [diff] [blame] | 1 | // SPDX-License-Identifier: GPL-2.0 |
| 2 | #ifndef IOU_RSRC_H |
| 3 | #define IOU_RSRC_H |
| 4 | |
Pavel Begunkov | 69bbc6a | 2023-04-04 13:39:57 +0100 | [diff] [blame] | 5 | #define IO_NODE_ALLOC_CACHE_MAX 32 |
| 6 | |
Jens Axboe | 7357298 | 2022-06-13 07:12:45 -0600 | [diff] [blame] | 7 | #define IO_RSRC_TAG_TABLE_SHIFT (PAGE_SHIFT - 3) |
| 8 | #define IO_RSRC_TAG_TABLE_MAX (1U << IO_RSRC_TAG_TABLE_SHIFT) |
| 9 | #define IO_RSRC_TAG_TABLE_MASK (IO_RSRC_TAG_TABLE_MAX - 1) |
| 10 | |
/* Kinds of registered resource tracked by struct io_rsrc_data/io_rsrc_node. */
enum {
	IORING_RSRC_FILE		= 0,	/* registered file (struct file) */
	IORING_RSRC_BUFFER		= 1,	/* registered buffer (struct io_mapped_ubuf) */
};
| 15 | |
/*
 * A single resource queued for deferred put.  Which union member is live
 * is determined by the owning node's ->type (IORING_RSRC_FILE/BUFFER).
 */
struct io_rsrc_put {
	u64 tag;		/* user-supplied tag posted as a CQE on removal */
	union {
		void *rsrc;			/* type-erased handle */
		struct file *file;		/* IORING_RSRC_FILE */
		struct io_mapped_ubuf *buf;	/* IORING_RSRC_BUFFER */
	};
};
| 24 | |
| 25 | typedef void (rsrc_put_fn)(struct io_ring_ctx *ctx, struct io_rsrc_put *prsrc); |
| 26 | |
/*
 * Per-ring bookkeeping for one registered resource table
 * (files or buffers).
 */
struct io_rsrc_data {
	struct io_ring_ctx		*ctx;	/* owning ring */

	u64				**tags;	/* 2-level tag table, see io_get_tag_slot() */
	unsigned int			nr;	/* number of registered resources */
	u16				rsrc_type;	/* IORING_RSRC_FILE or IORING_RSRC_BUFFER */
	bool				quiesce;	/* unregister/quiesce in progress */
};
| 35 | |
/*
 * Reference-counted node tying in-flight requests to the resource table
 * generation they were submitted against.  refs is plain int: it is only
 * manipulated under ctx->uring_lock (see io_put_rsrc_node()).
 */
struct io_rsrc_node {
	struct io_ring_ctx		*ctx;	/* owning ring */
	int				refs;	/* uring_lock-protected refcount */
	bool				empty;	/* no resource attached in ->item */
	u16				type;	/* IORING_RSRC_FILE or IORING_RSRC_BUFFER */
	struct list_head		node;	/* link on the ctx rsrc node list */
	struct io_rsrc_put		item;	/* resource to put when refs hits zero */
};
| 44 | |
Jens Axboe | ad163a7 | 2022-06-18 19:44:33 -0600 | [diff] [blame] | 45 | struct io_mapped_ubuf { |
| 46 | u64 ubuf; |
| 47 | u64 ubuf_end; |
| 48 | unsigned int nr_bvecs; |
| 49 | unsigned long acct_pages; |
Kees Cook | 04d9244 | 2023-08-17 14:21:47 -0700 | [diff] [blame] | 50 | struct bio_vec bvec[] __counted_by(nr_bvecs); |
Jens Axboe | ad163a7 | 2022-06-18 19:44:33 -0600 | [diff] [blame] | 51 | }; |
| 52 | |
Pavel Begunkov | b8fb5b4 | 2023-04-04 13:39:45 +0100 | [diff] [blame] | 53 | void io_rsrc_node_ref_zero(struct io_rsrc_node *node); |
Pavel Begunkov | 9eae865 | 2023-04-04 13:39:54 +0100 | [diff] [blame] | 54 | void io_rsrc_node_destroy(struct io_ring_ctx *ctx, struct io_rsrc_node *ref_node); |
Pavel Begunkov | 2933ae6 | 2023-04-11 12:06:07 +0100 | [diff] [blame] | 55 | struct io_rsrc_node *io_rsrc_node_alloc(struct io_ring_ctx *ctx); |
Pavel Begunkov | 63fea89 | 2023-04-18 14:06:35 +0100 | [diff] [blame] | 56 | int io_queue_rsrc_removal(struct io_rsrc_data *data, unsigned idx, void *rsrc); |
Jens Axboe | 7357298 | 2022-06-13 07:12:45 -0600 | [diff] [blame] | 57 | |
Pavel Begunkov | c059f785 | 2022-06-20 01:25:59 +0100 | [diff] [blame] | 58 | int io_import_fixed(int ddir, struct iov_iter *iter, |
| 59 | struct io_mapped_ubuf *imu, |
| 60 | u64 buf_addr, size_t len); |
Jens Axboe | 7357298 | 2022-06-13 07:12:45 -0600 | [diff] [blame] | 61 | |
| 62 | void __io_sqe_buffers_unregister(struct io_ring_ctx *ctx); |
| 63 | int io_sqe_buffers_unregister(struct io_ring_ctx *ctx); |
| 64 | int io_sqe_buffers_register(struct io_ring_ctx *ctx, void __user *arg, |
| 65 | unsigned int nr_args, u64 __user *tags); |
| 66 | void __io_sqe_files_unregister(struct io_ring_ctx *ctx); |
| 67 | int io_sqe_files_unregister(struct io_ring_ctx *ctx); |
| 68 | int io_sqe_files_register(struct io_ring_ctx *ctx, void __user *arg, |
| 69 | unsigned nr_args, u64 __user *tags); |
| 70 | |
Jens Axboe | 7357298 | 2022-06-13 07:12:45 -0600 | [diff] [blame] | 71 | int io_register_files_update(struct io_ring_ctx *ctx, void __user *arg, |
| 72 | unsigned nr_args); |
| 73 | int io_register_rsrc_update(struct io_ring_ctx *ctx, void __user *arg, |
| 74 | unsigned size, unsigned type); |
| 75 | int io_register_rsrc(struct io_ring_ctx *ctx, void __user *arg, |
| 76 | unsigned int size, unsigned int type); |
| 77 | |
Pavel Begunkov | 1f2c8f6 | 2023-04-04 13:39:55 +0100 | [diff] [blame] | 78 | static inline void io_put_rsrc_node(struct io_ring_ctx *ctx, struct io_rsrc_node *node) |
Jens Axboe | 7357298 | 2022-06-13 07:12:45 -0600 | [diff] [blame] | 79 | { |
Pavel Begunkov | 1f2c8f6 | 2023-04-04 13:39:55 +0100 | [diff] [blame] | 80 | lockdep_assert_held(&ctx->uring_lock); |
| 81 | |
Pavel Begunkov | ef8ae64 | 2023-04-04 13:39:49 +0100 | [diff] [blame] | 82 | if (node && !--node->refs) |
| 83 | io_rsrc_node_ref_zero(node); |
Jens Axboe | 7357298 | 2022-06-13 07:12:45 -0600 | [diff] [blame] | 84 | } |
| 85 | |
Pavel Begunkov | 8e15c0e | 2023-04-04 13:39:46 +0100 | [diff] [blame] | 86 | static inline void io_charge_rsrc_node(struct io_ring_ctx *ctx, |
| 87 | struct io_rsrc_node *node) |
Pavel Begunkov | 68ef557 | 2022-07-12 21:52:41 +0100 | [diff] [blame] | 88 | { |
Pavel Begunkov | ef8ae64 | 2023-04-04 13:39:49 +0100 | [diff] [blame] | 89 | node->refs++; |
Pavel Begunkov | 68ef557 | 2022-07-12 21:52:41 +0100 | [diff] [blame] | 90 | } |
| 91 | |
Jens Axboe | 3f30238 | 2024-01-11 13:34:33 -0700 | [diff] [blame] | 92 | static inline void __io_req_set_rsrc_node(struct io_kiocb *req, |
| 93 | struct io_ring_ctx *ctx) |
| 94 | { |
| 95 | lockdep_assert_held(&ctx->uring_lock); |
| 96 | req->rsrc_node = ctx->rsrc_node; |
| 97 | io_charge_rsrc_node(ctx, ctx->rsrc_node); |
| 98 | } |
| 99 | |
Jens Axboe | 7357298 | 2022-06-13 07:12:45 -0600 | [diff] [blame] | 100 | static inline void io_req_set_rsrc_node(struct io_kiocb *req, |
| 101 | struct io_ring_ctx *ctx, |
| 102 | unsigned int issue_flags) |
| 103 | { |
| 104 | if (!req->rsrc_node) { |
Pavel Begunkov | 4ff0b50 | 2023-03-29 15:03:43 +0100 | [diff] [blame] | 105 | io_ring_submit_lock(ctx, issue_flags); |
Jens Axboe | 3f30238 | 2024-01-11 13:34:33 -0700 | [diff] [blame] | 106 | __io_req_set_rsrc_node(req, ctx); |
Pavel Begunkov | 4ff0b50 | 2023-03-29 15:03:43 +0100 | [diff] [blame] | 107 | io_ring_submit_unlock(ctx, issue_flags); |
Jens Axboe | 7357298 | 2022-06-13 07:12:45 -0600 | [diff] [blame] | 108 | } |
| 109 | } |
| 110 | |
| 111 | static inline u64 *io_get_tag_slot(struct io_rsrc_data *data, unsigned int idx) |
| 112 | { |
| 113 | unsigned int off = idx & IO_RSRC_TAG_TABLE_MASK; |
| 114 | unsigned int table_idx = idx >> IO_RSRC_TAG_TABLE_SHIFT; |
| 115 | |
| 116 | return &data->tags[table_idx][off]; |
| 117 | } |
| 118 | |
Pavel Begunkov | 2933ae6 | 2023-04-11 12:06:07 +0100 | [diff] [blame] | 119 | static inline int io_rsrc_init(struct io_ring_ctx *ctx) |
| 120 | { |
| 121 | ctx->rsrc_node = io_rsrc_node_alloc(ctx); |
| 122 | return ctx->rsrc_node ? 0 : -ENOMEM; |
| 123 | } |
| 124 | |
Pavel Begunkov | d9808ce | 2022-09-01 11:54:02 +0100 | [diff] [blame] | 125 | int io_files_update(struct io_kiocb *req, unsigned int issue_flags); |
| 126 | int io_files_update_prep(struct io_kiocb *req, const struct io_uring_sqe *sqe); |
Pavel Begunkov | 6a9ce66 | 2022-07-25 10:52:05 +0100 | [diff] [blame] | 127 | |
| 128 | int __io_account_mem(struct user_struct *user, unsigned long nr_pages); |
| 129 | |
/*
 * Uncharge @nr_pages previously accounted against @user's locked-memory
 * counter (the counterpart of __io_account_mem()).
 */
static inline void __io_unaccount_mem(struct user_struct *user,
				      unsigned long nr_pages)
{
	atomic_long_sub(nr_pages, &user->locked_vm);
}
| 135 | |
Jens Axboe | 7357298 | 2022-06-13 07:12:45 -0600 | [diff] [blame] | 136 | #endif |