Greg Kroah-Hartman | b244131 | 2017-11-01 15:07:57 +0100 | [diff] [blame] | 1 | // SPDX-License-Identifier: GPL-2.0 |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 2 | /* |
| 3 | * Shared Memory Communications over RDMA (SMC-R) and RoCE |
| 4 | * |
| 5 | * Basic Transport Functions exploiting Infiniband API |
| 6 | * |
| 7 | * Copyright IBM Corp. 2016 |
| 8 | * |
| 9 | * Author(s): Ursula Braun <ubraun@linux.vnet.ibm.com> |
| 10 | */ |
| 11 | |
| 12 | #include <linux/socket.h> |
| 13 | #include <linux/if_vlan.h> |
| 14 | #include <linux/random.h> |
| 15 | #include <linux/workqueue.h> |
Ursula Braun | 6dabd40 | 2019-11-16 17:47:29 +0100 | [diff] [blame] | 16 | #include <linux/wait.h> |
Ursula Braun | a33a803 | 2019-11-16 17:47:30 +0100 | [diff] [blame] | 17 | #include <linux/reboot.h> |
Ursula Braun | 92f3cb0 | 2020-07-08 17:05:13 +0200 | [diff] [blame] | 18 | #include <linux/mutex.h> |
Guvenc Gulce | 099b990 | 2020-12-01 20:20:44 +0100 | [diff] [blame] | 19 | #include <linux/list.h> |
| 20 | #include <linux/smc.h> |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 21 | #include <net/tcp.h> |
| 22 | #include <net/sock.h> |
| 23 | #include <rdma/ib_verbs.h> |
Parav Pandit | ddb457c | 2018-06-05 08:40:19 +0300 | [diff] [blame] | 24 | #include <rdma/ib_cache.h> |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 25 | |
| 26 | #include "smc.h" |
| 27 | #include "smc_clc.h" |
| 28 | #include "smc_core.h" |
| 29 | #include "smc_ib.h" |
Ursula Braun | f38ba179 | 2017-01-09 16:55:19 +0100 | [diff] [blame] | 30 | #include "smc_wr.h" |
Ursula Braun | 9bf9abe | 2017-01-09 16:55:21 +0100 | [diff] [blame] | 31 | #include "smc_llc.h" |
Ursula Braun | 5f08318 | 2017-01-09 16:55:22 +0100 | [diff] [blame] | 32 | #include "smc_cdc.h" |
Ursula Braun | b38d732 | 2017-01-09 16:55:25 +0100 | [diff] [blame] | 33 | #include "smc_close.h" |
Hans Wippel | c6ba7c9 | 2018-06-28 19:05:07 +0200 | [diff] [blame] | 34 | #include "smc_ism.h" |
Guvenc Gulce | 099b990 | 2020-12-01 20:20:44 +0100 | [diff] [blame] | 35 | #include "smc_netlink.h" |
Guvenc Gulce | e0e4b8f | 2021-06-16 16:52:55 +0200 | [diff] [blame] | 36 | #include "smc_stats.h" |
Tony Lu | a3a0e81 | 2021-11-01 15:39:16 +0800 | [diff] [blame] | 37 | #include "smc_tracepoint.h" |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 38 | |
/* growth increment for the link group id space, and delays (in jiffies)
 * before an unused link group is finally freed; the client delay exceeds
 * the server delay to avoid out-of-sync link group removal
 */
#define SMC_LGR_NUM_INCR 256
#define SMC_LGR_FREE_DELAY_SERV (600 * HZ)
#define SMC_LGR_FREE_DELAY_CLNT (SMC_LGR_FREE_DELAY_SERV + 10 * HZ)

struct smc_lgr_list smc_lgr_list = { /* established link groups */
	.lock = __SPIN_LOCK_UNLOCKED(smc_lgr_list.lock),
	.list = LIST_HEAD_INIT(smc_lgr_list.list),
	.num = 0,
};

static atomic_t lgr_cnt = ATOMIC_INIT(0); /* number of existing link groups */
static DECLARE_WAIT_QUEUE_HEAD(lgrs_deleted); /* wait queue for lgr teardown */

/* forward declarations for helpers defined later in this file */
static void smc_buf_free(struct smc_link_group *lgr, bool is_rmb,
			 struct smc_buf_desc *buf_desc);
static void __smc_lgr_terminate(struct smc_link_group *lgr, bool soft);

static void smc_link_down_work(struct work_struct *work);
Ursula Braun | a0a62ee | 2019-10-09 10:07:44 +0200 | [diff] [blame] | 58 | /* return head of link group list and its lock for a given link group */ |
| 59 | static inline struct list_head *smc_lgr_list_head(struct smc_link_group *lgr, |
| 60 | spinlock_t **lgr_lock) |
| 61 | { |
| 62 | if (lgr->is_smcd) { |
| 63 | *lgr_lock = &lgr->smcd->lgr_lock; |
| 64 | return &lgr->smcd->lgr_list; |
| 65 | } |
| 66 | |
| 67 | *lgr_lock = &smc_lgr_list.lock; |
| 68 | return &smc_lgr_list.list; |
| 69 | } |
| 70 | |
/* account one more link on the link's RoCE device port
 * (ibport is 1-based, the per-port counter array is 0-based)
 */
static void smc_ibdev_cnt_inc(struct smc_link *lnk)
{
	atomic_inc(&lnk->smcibdev->lnk_cnt_by_port[lnk->ibport - 1]);
}

/* account one link less on the link's RoCE device port */
static void smc_ibdev_cnt_dec(struct smc_link *lnk)
{
	atomic_dec(&lnk->smcibdev->lnk_cnt_by_port[lnk->ibport - 1]);
}
| 80 | |
Karsten Graul | 97cdbc4 | 2018-03-14 11:01:02 +0100 | [diff] [blame] | 81 | static void smc_lgr_schedule_free_work(struct smc_link_group *lgr) |
| 82 | { |
| 83 | /* client link group creation always follows the server link group |
| 84 | * creation. For client use a somewhat higher removal delay time, |
| 85 | * otherwise there is a risk of out-of-sync link groups. |
| 86 | */ |
Ursula Braun | f9aab6f | 2020-09-10 18:48:26 +0200 | [diff] [blame] | 87 | if (!lgr->freeing) { |
Ursula Braun | 8e316b9 | 2019-10-21 16:13:11 +0200 | [diff] [blame] | 88 | mod_delayed_work(system_wq, &lgr->free_work, |
| 89 | (!lgr->is_smcd && lgr->role == SMC_CLNT) ? |
| 90 | SMC_LGR_FREE_DELAY_CLNT : |
| 91 | SMC_LGR_FREE_DELAY_SERV); |
| 92 | } |
Karsten Graul | 97cdbc4 | 2018-03-14 11:01:02 +0100 | [diff] [blame] | 93 | } |
| 94 | |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 95 | /* Register connection's alert token in our lookup structure. |
| 96 | * To use rbtrees we have to implement our own insert core. |
| 97 | * Requires @conns_lock |
| 98 | * @smc connection to register |
| 99 | * Returns 0 on success, != otherwise. |
| 100 | */ |
| 101 | static void smc_lgr_add_alert_token(struct smc_connection *conn) |
| 102 | { |
| 103 | struct rb_node **link, *parent = NULL; |
| 104 | u32 token = conn->alert_token_local; |
| 105 | |
| 106 | link = &conn->lgr->conns_all.rb_node; |
| 107 | while (*link) { |
| 108 | struct smc_connection *cur = rb_entry(*link, |
| 109 | struct smc_connection, alert_node); |
| 110 | |
| 111 | parent = *link; |
| 112 | if (cur->alert_token_local > token) |
| 113 | link = &parent->rb_left; |
| 114 | else |
| 115 | link = &parent->rb_right; |
| 116 | } |
| 117 | /* Put the new node there */ |
| 118 | rb_link_node(&conn->alert_node, parent, link); |
| 119 | rb_insert_color(&conn->alert_node, &conn->lgr->conns_all); |
| 120 | } |
| 121 | |
/* assign an SMC-R link to the connection.
 * Returns 0 on success or SMC_CLC_DECL_NOACTLINK if no usable link exists.
 */
static int smcr_lgr_conn_assign_link(struct smc_connection *conn, bool first)
{
	/* the first connection of a link group may use a link that is still
	 * activating; all later connections require a fully active link
	 */
	enum smc_link_state expected = first ? SMC_LNK_ACTIVATING :
				       SMC_LNK_ACTIVE;
	int i, j;

	/* do link balancing */
	conn->lnk = NULL; /* reset conn->lnk first */
	for (i = 0; i < SMC_LINKS_PER_LGR_MAX; i++) {
		struct smc_link *lnk = &conn->lgr->lnk[i];

		if (lnk->state != expected || lnk->link_is_asym)
			continue;
		if (conn->lgr->role == SMC_CLNT) {
			conn->lnk = lnk; /* temporary, SMC server assigns link*/
			break;
		}
		if (conn->lgr->conns_num % 2) {
			/* every second connection tries the next usable
			 * symmetric link, to spread the load
			 */
			for (j = i + 1; j < SMC_LINKS_PER_LGR_MAX; j++) {
				struct smc_link *lnk2;

				lnk2 = &conn->lgr->lnk[j];
				if (lnk2->state == expected &&
				    !lnk2->link_is_asym) {
					conn->lnk = lnk2;
					break;
				}
			}
		}
		if (!conn->lnk) /* no alternative found: use the first match */
			conn->lnk = lnk;
		break;
	}
	if (!conn->lnk)
		return SMC_CLC_DECL_NOACTLINK;
	atomic_inc(&conn->lnk->conn_cnt);
	return 0;
}
| 161 | |
/* Register connection in link group by assigning an alert token
 * registered in a search tree.
 * Requires @conns_lock
 * Note that '0' is a reserved value and not assigned.
 */
static int smc_lgr_register_conn(struct smc_connection *conn, bool first)
{
	struct smc_sock *smc = container_of(conn, struct smc_sock, conn);
	static atomic_t nexttoken = ATOMIC_INIT(0);
	int rc;

	if (!conn->lgr->is_smcd) {
		rc = smcr_lgr_conn_assign_link(conn, first);
		if (rc) {
			conn->lgr = NULL; /* detach: no usable link found */
			return rc;
		}
	}
	/* find a new alert_token_local value not yet used by some connection
	 * in this link group
	 */
	sock_hold(&smc->sk); /* sock_put in smc_lgr_unregister_conn() */
	while (!conn->alert_token_local) {
		conn->alert_token_local = atomic_inc_return(&nexttoken);
		if (smc_lgr_find_conn(conn->alert_token_local, conn->lgr))
			conn->alert_token_local = 0; /* in use: try the next */
	}
	smc_lgr_add_alert_token(conn);
	conn->lgr->conns_num++;
	return 0;
}
| 193 | |
| 194 | /* Unregister connection and reset the alert token of the given connection< |
| 195 | */ |
| 196 | static void __smc_lgr_unregister_conn(struct smc_connection *conn) |
| 197 | { |
| 198 | struct smc_sock *smc = container_of(conn, struct smc_sock, conn); |
| 199 | struct smc_link_group *lgr = conn->lgr; |
| 200 | |
| 201 | rb_erase(&conn->alert_node, &lgr->conns_all); |
Guvenc Gulce | 07d5158 | 2020-12-01 20:20:38 +0100 | [diff] [blame] | 202 | if (conn->lnk) |
| 203 | atomic_dec(&conn->lnk->conn_cnt); |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 204 | lgr->conns_num--; |
| 205 | conn->alert_token_local = 0; |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 206 | sock_put(&smc->sk); /* sock_hold in smc_lgr_register_conn() */ |
| 207 | } |
| 208 | |
/* Unregister connection from lgr
 */
static void smc_lgr_unregister_conn(struct smc_connection *conn)
{
	struct smc_link_group *lgr = conn->lgr;

	if (!smc_conn_lgr_valid(conn))
		return; /* connection no longer attached to a link group */
	write_lock_bh(&lgr->conns_lock);
	if (conn->alert_token_local) { /* still registered in the rbtree */
		__smc_lgr_unregister_conn(conn);
	}
	write_unlock_bh(&lgr->conns_lock);
}
| 223 | |
/* netlink: fill one SMC_NETLINK_GET_SYS_INFO message with system-level SMC
 * information (version, release, capabilities, hostname, system EID).
 * Returns skb->len in all cases so the dump infrastructure proceeds.
 */
int smc_nl_get_sys_info(struct sk_buff *skb, struct netlink_callback *cb)
{
	struct smc_nl_dmp_ctx *cb_ctx = smc_nl_dmp_ctx(cb);
	char hostname[SMC_MAX_HOSTNAME_LEN + 1];
	char smc_seid[SMC_MAX_EID_LEN + 1];
	struct nlattr *attrs;
	u8 *seid = NULL;
	u8 *host = NULL;
	void *nlh;

	nlh = genlmsg_put(skb, NETLINK_CB(cb->skb).portid, cb->nlh->nlmsg_seq,
			  &smc_gen_nl_family, NLM_F_MULTI,
			  SMC_NETLINK_GET_SYS_INFO);
	if (!nlh)
		goto errmsg;
	if (cb_ctx->pos[0]) /* pos[0] set below: info was already dumped */
		goto errout;
	attrs = nla_nest_start(skb, SMC_GEN_SYS_INFO);
	if (!attrs)
		goto errout;
	if (nla_put_u8(skb, SMC_NLA_SYS_VER, SMC_V2))
		goto errattr;
	if (nla_put_u8(skb, SMC_NLA_SYS_REL, SMC_RELEASE))
		goto errattr;
	if (nla_put_u8(skb, SMC_NLA_SYS_IS_ISM_V2, smc_ism_is_v2_capable()))
		goto errattr;
	if (nla_put_u8(skb, SMC_NLA_SYS_IS_SMCR_V2, true))
		goto errattr;
	smc_clc_get_hostname(&host);
	if (host) {
		/* bounded copy with forced NUL termination */
		memcpy(hostname, host, SMC_MAX_HOSTNAME_LEN);
		hostname[SMC_MAX_HOSTNAME_LEN] = 0;
		if (nla_put_string(skb, SMC_NLA_SYS_LOCAL_HOST, hostname))
			goto errattr;
	}
	if (smc_ism_is_v2_capable()) {
		/* NOTE(review): seid is dereferenced without a NULL check;
		 * presumably smc_ism_get_system_eid() always supplies one
		 * when ISM v2 is capable - confirm against smc_ism.c
		 */
		smc_ism_get_system_eid(&seid);
		memcpy(smc_seid, seid, SMC_MAX_EID_LEN);
		smc_seid[SMC_MAX_EID_LEN] = 0;
		if (nla_put_string(skb, SMC_NLA_SYS_SEID, smc_seid))
			goto errattr;
	}
	nla_nest_end(skb, attrs);
	genlmsg_end(skb, nlh);
	cb_ctx->pos[0] = 1; /* remember: system info has been dumped */
	return skb->len;

errattr:
	nla_nest_cancel(skb, attrs);
errout:
	genlmsg_cancel(skb, nlh);
errmsg:
	return skb->len;
}
| 278 | |
/* Fill SMC_NLA_LGR_D_V2_COMMON/SMC_NLA_LGR_R_V2_COMMON nested attributes.
 * The caller started the @v2_attrs nest; on success the nest is completed
 * (returns 0), on overflow it is cancelled (returns -EMSGSIZE).
 */
static int smc_nl_fill_lgr_v2_common(struct smc_link_group *lgr,
				     struct sk_buff *skb,
				     struct netlink_callback *cb,
				     struct nlattr *v2_attrs)
{
	char smc_host[SMC_MAX_HOSTNAME_LEN + 1];
	char smc_eid[SMC_MAX_EID_LEN + 1];

	if (nla_put_u8(skb, SMC_NLA_LGR_V2_VER, lgr->smc_version))
		goto errv2attr;
	if (nla_put_u8(skb, SMC_NLA_LGR_V2_REL, lgr->peer_smc_release))
		goto errv2attr;
	if (nla_put_u8(skb, SMC_NLA_LGR_V2_OS, lgr->peer_os))
		goto errv2attr;
	/* bounded copies with forced NUL termination before nla_put_string */
	memcpy(smc_host, lgr->peer_hostname, SMC_MAX_HOSTNAME_LEN);
	smc_host[SMC_MAX_HOSTNAME_LEN] = 0;
	if (nla_put_string(skb, SMC_NLA_LGR_V2_PEER_HOST, smc_host))
		goto errv2attr;
	memcpy(smc_eid, lgr->negotiated_eid, SMC_MAX_EID_LEN);
	smc_eid[SMC_MAX_EID_LEN] = 0;
	if (nla_put_string(skb, SMC_NLA_LGR_V2_NEG_EID, smc_eid))
		goto errv2attr;

	nla_nest_end(skb, v2_attrs);
	return 0;

errv2attr:
	nla_nest_cancel(skb, v2_attrs);
	return -EMSGSIZE;
}
| 310 | |
/* Fill the SMC_NLA_LGR_R_V2 nest with SMC-R v2 specific attributes.
 * Returns 0 on success, -EMSGSIZE if the skb ran out of room.
 */
static int smc_nl_fill_smcr_lgr_v2(struct smc_link_group *lgr,
				   struct sk_buff *skb,
				   struct netlink_callback *cb)
{
	struct nlattr *v2_attrs;

	v2_attrs = nla_nest_start(skb, SMC_NLA_LGR_R_V2);
	if (!v2_attrs)
		goto errattr;
	if (nla_put_u8(skb, SMC_NLA_LGR_R_V2_DIRECT, !lgr->uses_gateway))
		goto errv2attr;
	if (nla_put_u8(skb, SMC_NLA_LGR_R_V2_MAX_CONNS, lgr->max_conns))
		goto errv2attr;
	if (nla_put_u8(skb, SMC_NLA_LGR_R_V2_MAX_LINKS, lgr->max_links))
		goto errv2attr;

	nla_nest_end(skb, v2_attrs);
	return 0;

errv2attr:
	nla_nest_cancel(skb, v2_attrs);
errattr:
	return -EMSGSIZE;
}
| 335 | |
/* Fill the SMC_GEN_LGR_SMCR nest describing one SMC-R link group.
 * Returns 0 on success, -EMSGSIZE if the skb ran out of room.
 */
static int smc_nl_fill_lgr(struct smc_link_group *lgr,
			   struct sk_buff *skb,
			   struct netlink_callback *cb)
{
	char smc_target[SMC_MAX_PNETID_LEN + 1];
	struct nlattr *attrs, *v2_attrs;

	attrs = nla_nest_start(skb, SMC_GEN_LGR_SMCR);
	if (!attrs)
		goto errout;

	/* lgr->id is exported as a raw 32-bit value */
	if (nla_put_u32(skb, SMC_NLA_LGR_R_ID, *((u32 *)&lgr->id)))
		goto errattr;
	if (nla_put_u32(skb, SMC_NLA_LGR_R_CONNS_NUM, lgr->conns_num))
		goto errattr;
	if (nla_put_u8(skb, SMC_NLA_LGR_R_ROLE, lgr->role))
		goto errattr;
	if (nla_put_u8(skb, SMC_NLA_LGR_R_TYPE, lgr->type))
		goto errattr;
	if (nla_put_u8(skb, SMC_NLA_LGR_R_BUF_TYPE, lgr->buf_type))
		goto errattr;
	if (nla_put_u8(skb, SMC_NLA_LGR_R_VLAN_ID, lgr->vlan_id))
		goto errattr;
	if (nla_put_u64_64bit(skb, SMC_NLA_LGR_R_NET_COOKIE,
			      lgr->net->net_cookie, SMC_NLA_LGR_R_PAD))
		goto errattr;
	/* bounded copy with forced NUL termination */
	memcpy(smc_target, lgr->pnet_id, SMC_MAX_PNETID_LEN);
	smc_target[SMC_MAX_PNETID_LEN] = 0;
	if (nla_put_string(skb, SMC_NLA_LGR_R_PNETID, smc_target))
		goto errattr;
	if (lgr->smc_version > SMC_V1) {
		/* v2-only attributes */
		v2_attrs = nla_nest_start(skb, SMC_NLA_LGR_R_V2_COMMON);
		if (!v2_attrs)
			goto errattr;
		if (smc_nl_fill_lgr_v2_common(lgr, skb, cb, v2_attrs))
			goto errattr;
		if (smc_nl_fill_smcr_lgr_v2(lgr, skb, cb))
			goto errattr;
	}

	nla_nest_end(skb, attrs);
	return 0;
errattr:
	nla_nest_cancel(skb, attrs);
errout:
	return -EMSGSIZE;
}
| 383 | |
/* Emit one complete SMC_NETLINK_GET_LINK_SMCR message describing a single
 * link of @lgr. Returns 0 on success, -EMSGSIZE if the skb ran out of room.
 */
static int smc_nl_fill_lgr_link(struct smc_link_group *lgr,
				struct smc_link *link,
				struct sk_buff *skb,
				struct netlink_callback *cb)
{
	char smc_ibname[IB_DEVICE_NAME_MAX];
	u8 smc_gid_target[41]; /* buffer for the converted GID string */
	struct nlattr *attrs;
	u32 link_uid = 0;
	void *nlh;

	nlh = genlmsg_put(skb, NETLINK_CB(cb->skb).portid, cb->nlh->nlmsg_seq,
			  &smc_gen_nl_family, NLM_F_MULTI,
			  SMC_NETLINK_GET_LINK_SMCR);
	if (!nlh)
		goto errmsg;

	attrs = nla_nest_start(skb, SMC_GEN_LINK_SMCR);
	if (!attrs)
		goto errout;

	if (nla_put_u8(skb, SMC_NLA_LINK_ID, link->link_id))
		goto errattr;
	if (nla_put_u32(skb, SMC_NLA_LINK_STATE, link->state))
		goto errattr;
	if (nla_put_u32(skb, SMC_NLA_LINK_CONN_CNT,
			atomic_read(&link->conn_cnt)))
		goto errattr;
	if (nla_put_u8(skb, SMC_NLA_LINK_IB_PORT, link->ibport))
		goto errattr;
	if (nla_put_u32(skb, SMC_NLA_LINK_NET_DEV, link->ndev_ifidx))
		goto errattr;
	snprintf(smc_ibname, sizeof(smc_ibname), "%s", link->ibname);
	if (nla_put_string(skb, SMC_NLA_LINK_IB_DEV, smc_ibname))
		goto errattr;
	/* link_uid/peer_link_uid are copied byte-wise into a u32 for export */
	memcpy(&link_uid, link->link_uid, sizeof(link_uid));
	if (nla_put_u32(skb, SMC_NLA_LINK_UID, link_uid))
		goto errattr;
	memcpy(&link_uid, link->peer_link_uid, sizeof(link_uid));
	if (nla_put_u32(skb, SMC_NLA_LINK_PEER_UID, link_uid))
		goto errattr;
	memset(smc_gid_target, 0, sizeof(smc_gid_target));
	smc_gid_be16_convert(smc_gid_target, link->gid);
	if (nla_put_string(skb, SMC_NLA_LINK_GID, smc_gid_target))
		goto errattr;
	memset(smc_gid_target, 0, sizeof(smc_gid_target));
	smc_gid_be16_convert(smc_gid_target, link->peer_gid);
	if (nla_put_string(skb, SMC_NLA_LINK_PEER_GID, smc_gid_target))
		goto errattr;

	nla_nest_end(skb, attrs);
	genlmsg_end(skb, nlh);
	return 0;
errattr:
	nla_nest_cancel(skb, attrs);
errout:
	genlmsg_cancel(skb, nlh);
errmsg:
	return -EMSGSIZE;
}
| 444 | |
/* Emit the link group message for @lgr and, if @list_links is set, one
 * additional message per usable link.
 * Returns 0 on success, -EMSGSIZE if the skb ran out of room.
 */
static int smc_nl_handle_lgr(struct smc_link_group *lgr,
			     struct sk_buff *skb,
			     struct netlink_callback *cb,
			     bool list_links)
{
	void *nlh;
	int i;

	nlh = genlmsg_put(skb, NETLINK_CB(cb->skb).portid, cb->nlh->nlmsg_seq,
			  &smc_gen_nl_family, NLM_F_MULTI,
			  SMC_NETLINK_GET_LGR_SMCR);
	if (!nlh)
		goto errmsg;
	if (smc_nl_fill_lgr(lgr, skb, cb))
		goto errout;

	genlmsg_end(skb, nlh);
	if (!list_links)
		goto out;
	for (i = 0; i < SMC_LINKS_PER_LGR_MAX; i++) {
		if (!smc_link_usable(&lgr->lnk[i]))
			continue; /* skip unused link slots */
		if (smc_nl_fill_lgr_link(lgr, &lgr->lnk[i], skb, cb))
			goto errout;
	}
out:
	return 0;

errout:
	genlmsg_cancel(skb, nlh);
errmsg:
	return -EMSGSIZE;
}
| 478 | |
/* Walk @smc_lgr's link group list and dump each group, resuming at the
 * position saved in the netlink dump context (cb_ctx->pos[0]).
 */
static void smc_nl_fill_lgr_list(struct smc_lgr_list *smc_lgr,
				 struct sk_buff *skb,
				 struct netlink_callback *cb,
				 bool list_links)
{
	struct smc_nl_dmp_ctx *cb_ctx = smc_nl_dmp_ctx(cb);
	struct smc_link_group *lgr;
	int snum = cb_ctx->pos[0]; /* resume point from the previous pass */
	int num = 0;

	spin_lock_bh(&smc_lgr->lock);
	list_for_each_entry(lgr, &smc_lgr->list, list) {
		if (num < snum)
			goto next; /* already dumped in a previous pass */
		if (smc_nl_handle_lgr(lgr, skb, cb, list_links))
			goto errout;
next:
		num++;
	}
errout:
	spin_unlock_bh(&smc_lgr->lock);
	cb_ctx->pos[0] = num; /* remember how far we got */
}
| 502 | |
/* Emit one SMC_NETLINK_GET_LGR_SMCD message describing an SMC-D link group.
 * Returns 0 on success, -EMSGSIZE if the skb ran out of room.
 */
static int smc_nl_fill_smcd_lgr(struct smc_link_group *lgr,
				struct sk_buff *skb,
				struct netlink_callback *cb)
{
	char smc_pnet[SMC_MAX_PNETID_LEN + 1];
	struct smcd_dev *smcd = lgr->smcd;
	struct nlattr *attrs;
	void *nlh;

	nlh = genlmsg_put(skb, NETLINK_CB(cb->skb).portid, cb->nlh->nlmsg_seq,
			  &smc_gen_nl_family, NLM_F_MULTI,
			  SMC_NETLINK_GET_LGR_SMCD);
	if (!nlh)
		goto errmsg;

	attrs = nla_nest_start(skb, SMC_GEN_LGR_SMCD);
	if (!attrs)
		goto errout;

	/* lgr->id is exported as a raw 32-bit value */
	if (nla_put_u32(skb, SMC_NLA_LGR_D_ID, *((u32 *)&lgr->id)))
		goto errattr;
	/* local GID is queried through the ISM device's ops table */
	if (nla_put_u64_64bit(skb, SMC_NLA_LGR_D_GID,
			      smcd->ops->get_local_gid(smcd),
			      SMC_NLA_LGR_D_PAD))
		goto errattr;
	if (nla_put_u64_64bit(skb, SMC_NLA_LGR_D_PEER_GID, lgr->peer_gid,
			      SMC_NLA_LGR_D_PAD))
		goto errattr;
	if (nla_put_u8(skb, SMC_NLA_LGR_D_VLAN_ID, lgr->vlan_id))
		goto errattr;
	if (nla_put_u32(skb, SMC_NLA_LGR_D_CONNS_NUM, lgr->conns_num))
		goto errattr;
	if (nla_put_u32(skb, SMC_NLA_LGR_D_CHID, smc_ism_get_chid(lgr->smcd)))
		goto errattr;
	/* bounded copy with forced NUL termination */
	memcpy(smc_pnet, lgr->smcd->pnetid, SMC_MAX_PNETID_LEN);
	smc_pnet[SMC_MAX_PNETID_LEN] = 0;
	if (nla_put_string(skb, SMC_NLA_LGR_D_PNETID, smc_pnet))
		goto errattr;
	if (lgr->smc_version > SMC_V1) {
		/* v2-only attributes */
		struct nlattr *v2_attrs;

		v2_attrs = nla_nest_start(skb, SMC_NLA_LGR_D_V2_COMMON);
		if (!v2_attrs)
			goto errattr;
		if (smc_nl_fill_lgr_v2_common(lgr, skb, cb, v2_attrs))
			goto errattr;
	}
	nla_nest_end(skb, attrs);
	genlmsg_end(skb, nlh);
	return 0;

errattr:
	nla_nest_cancel(skb, attrs);
errout:
	genlmsg_cancel(skb, nlh);
errmsg:
	return -EMSGSIZE;
}
| 561 | |
/* Dump all SMC-D link groups of one ISM device, resuming at the position
 * saved in cb_ctx->pos[1]. Returns 0 or the first non-zero fill result.
 */
static int smc_nl_handle_smcd_lgr(struct smcd_dev *dev,
				  struct sk_buff *skb,
				  struct netlink_callback *cb)
{
	struct smc_nl_dmp_ctx *cb_ctx = smc_nl_dmp_ctx(cb);
	struct smc_link_group *lgr;
	int snum = cb_ctx->pos[1]; /* resume point from the previous pass */
	int rc = 0, num = 0;

	spin_lock_bh(&dev->lgr_lock);
	list_for_each_entry(lgr, &dev->lgr_list, list) {
		if (!lgr->is_smcd)
			continue; /* only SMC-D groups belong in this dump */
		if (num < snum)
			goto next; /* already dumped in a previous pass */
		rc = smc_nl_fill_smcd_lgr(lgr, skb, cb);
		if (rc)
			goto errout;
next:
		num++;
	}
errout:
	spin_unlock_bh(&dev->lgr_lock);
	cb_ctx->pos[1] = num;
	return rc;
}
| 588 | |
/* Walk the SMC-D device list and dump the link groups of every device
 * that has any, resuming at cb_ctx->pos[0].
 * Returns 0 or the first non-zero per-device dump result.
 */
static int smc_nl_fill_smcd_dev(struct smcd_dev_list *dev_list,
				struct sk_buff *skb,
				struct netlink_callback *cb)
{
	struct smc_nl_dmp_ctx *cb_ctx = smc_nl_dmp_ctx(cb);
	struct smcd_dev *smcd_dev;
	int snum = cb_ctx->pos[0]; /* resume point from the previous pass */
	int rc = 0, num = 0;

	mutex_lock(&dev_list->mutex);
	list_for_each_entry(smcd_dev, &dev_list->list, list) {
		if (list_empty(&smcd_dev->lgr_list))
			continue; /* device has no link groups to dump */
		if (num < snum)
			goto next;
		rc = smc_nl_handle_smcd_lgr(smcd_dev, skb, cb);
		if (rc)
			goto errout;
next:
		num++;
	}
errout:
	mutex_unlock(&dev_list->mutex);
	cb_ctx->pos[0] = num;
	return rc;
}
| 615 | |
Guvenc Gulce | e9b8c84 | 2020-12-01 20:20:45 +0100 | [diff] [blame] | 616 | int smcr_nl_get_lgr(struct sk_buff *skb, struct netlink_callback *cb) |
| 617 | { |
Guvenc Gulce | 5a7e09d | 2020-12-01 20:20:46 +0100 | [diff] [blame] | 618 | bool list_links = false; |
| 619 | |
| 620 | smc_nl_fill_lgr_list(&smc_lgr_list, skb, cb, list_links); |
| 621 | return skb->len; |
| 622 | } |
| 623 | |
| 624 | int smcr_nl_get_link(struct sk_buff *skb, struct netlink_callback *cb) |
| 625 | { |
| 626 | bool list_links = true; |
| 627 | |
| 628 | smc_nl_fill_lgr_list(&smc_lgr_list, skb, cb, list_links); |
Guvenc Gulce | e9b8c84 | 2020-12-01 20:20:45 +0100 | [diff] [blame] | 629 | return skb->len; |
| 630 | } |
| 631 | |
/* netlink: dump the link groups of all SMC-D devices */
int smcd_nl_get_lgr(struct sk_buff *skb, struct netlink_callback *cb)
{
	smc_nl_fill_smcd_dev(&smcd_dev_list, skb, cb);
	return skb->len;
}
| 637 | |
/* Remove a link group from its list and terminate it early; presumably
 * used before the lgr carries regular connections - confirm with callers.
 */
void smc_lgr_cleanup_early(struct smc_link_group *lgr)
{
	spinlock_t *lgr_lock;

	if (!lgr)
		return;

	smc_lgr_list_head(lgr, &lgr_lock);
	spin_lock_bh(lgr_lock);
	/* do not use this link group for new connections */
	if (!list_empty(&lgr->list))
		list_del_init(&lgr->list);
	spin_unlock_bh(lgr_lock);
	__smc_lgr_terminate(lgr, true); /* soft termination */
}
| 653 | |
Karsten Graul | a52bcc9 | 2020-05-04 14:18:46 +0200 | [diff] [blame] | 654 | static void smcr_lgr_link_deactivate_all(struct smc_link_group *lgr) |
| 655 | { |
| 656 | int i; |
| 657 | |
| 658 | for (i = 0; i < SMC_LINKS_PER_LGR_MAX; i++) { |
| 659 | struct smc_link *lnk = &lgr->lnk[i]; |
| 660 | |
Dust Li | 90cee52 | 2021-12-28 17:03:24 +0800 | [diff] [blame] | 661 | if (smc_link_sendable(lnk)) |
Karsten Graul | a52bcc9 | 2020-05-04 14:18:46 +0200 | [diff] [blame] | 662 | lnk->state = SMC_LNK_INACTIVE; |
| 663 | } |
Karsten Graul | 6778a6b | 2020-07-08 17:05:11 +0200 | [diff] [blame] | 664 | wake_up_all(&lgr->llc_msg_waiter); |
| 665 | wake_up_all(&lgr->llc_flow_waiter); |
Karsten Graul | a52bcc9 | 2020-05-04 14:18:46 +0200 | [diff] [blame] | 666 | } |
| 667 | |
Ursula Braun | 3f3f0e3 | 2018-11-22 10:26:35 +0100 | [diff] [blame] | 668 | static void smc_lgr_free(struct smc_link_group *lgr); |
| 669 | |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 670 | static void smc_lgr_free_work(struct work_struct *work) |
| 671 | { |
| 672 | struct smc_link_group *lgr = container_of(to_delayed_work(work), |
| 673 | struct smc_link_group, |
| 674 | free_work); |
Ursula Braun | a0a62ee | 2019-10-09 10:07:44 +0200 | [diff] [blame] | 675 | spinlock_t *lgr_lock; |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 676 | bool conns; |
| 677 | |
Ursula Braun | a0a62ee | 2019-10-09 10:07:44 +0200 | [diff] [blame] | 678 | smc_lgr_list_head(lgr, &lgr_lock); |
| 679 | spin_lock_bh(lgr_lock); |
Ursula Braun | 8e316b9 | 2019-10-21 16:13:11 +0200 | [diff] [blame] | 680 | if (lgr->freeing) { |
| 681 | spin_unlock_bh(lgr_lock); |
| 682 | return; |
| 683 | } |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 684 | read_lock_bh(&lgr->conns_lock); |
| 685 | conns = RB_EMPTY_ROOT(&lgr->conns_all); |
| 686 | read_unlock_bh(&lgr->conns_lock); |
| 687 | if (!conns) { /* number of lgr connections is no longer zero */ |
Ursula Braun | a0a62ee | 2019-10-09 10:07:44 +0200 | [diff] [blame] | 688 | spin_unlock_bh(lgr_lock); |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 689 | return; |
| 690 | } |
Ursula Braun | 8caa654 | 2019-10-21 16:13:09 +0200 | [diff] [blame] | 691 | list_del_init(&lgr->list); /* remove from smc_lgr_list */ |
Ursula Braun | 8e316b9 | 2019-10-21 16:13:11 +0200 | [diff] [blame] | 692 | lgr->freeing = 1; /* this instance does the freeing, no new schedule */ |
| 693 | spin_unlock_bh(lgr_lock); |
| 694 | cancel_delayed_work(&lgr->free_work); |
Karsten Graul | 0d18a0cb | 2018-07-25 16:35:33 +0200 | [diff] [blame] | 695 | |
Karsten Graul | f3811fd | 2020-05-04 14:18:42 +0200 | [diff] [blame] | 696 | if (!lgr->is_smcd && !lgr->terminating) |
| 697 | smc_llc_send_link_delete_all(lgr, true, |
| 698 | SMC_LLC_DEL_PROG_INIT_TERM); |
Ursula Braun | 42bfba9 | 2019-11-14 13:02:41 +0100 | [diff] [blame] | 699 | if (lgr->is_smcd && !lgr->terminating) |
Ursula Braun | 8e316b9 | 2019-10-21 16:13:11 +0200 | [diff] [blame] | 700 | smc_ism_signal_shutdown(lgr); |
Karsten Graul | a52bcc9 | 2020-05-04 14:18:46 +0200 | [diff] [blame] | 701 | if (!lgr->is_smcd) |
| 702 | smcr_lgr_link_deactivate_all(lgr); |
Ursula Braun | 8e316b9 | 2019-10-21 16:13:11 +0200 | [diff] [blame] | 703 | smc_lgr_free(lgr); |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 704 | } |
| 705 | |
Ursula Braun | f528ba2 | 2019-10-21 16:13:14 +0200 | [diff] [blame] | 706 | static void smc_lgr_terminate_work(struct work_struct *work) |
| 707 | { |
| 708 | struct smc_link_group *lgr = container_of(work, struct smc_link_group, |
| 709 | terminate_work); |
| 710 | |
Karsten Graul | 5f78fe9 | 2020-02-17 16:24:54 +0100 | [diff] [blame] | 711 | __smc_lgr_terminate(lgr, true); |
Ursula Braun | f528ba2 | 2019-10-21 16:13:14 +0200 | [diff] [blame] | 712 | } |
| 713 | |
Karsten Graul | 026c381 | 2020-04-29 17:10:39 +0200 | [diff] [blame] | 714 | /* return next unique link id for the lgr */ |
| 715 | static u8 smcr_next_link_id(struct smc_link_group *lgr) |
| 716 | { |
| 717 | u8 link_id; |
| 718 | int i; |
| 719 | |
| 720 | while (1) { |
Wen Gu | cf4f553 | 2021-11-15 17:45:07 +0800 | [diff] [blame] | 721 | again: |
Karsten Graul | 026c381 | 2020-04-29 17:10:39 +0200 | [diff] [blame] | 722 | link_id = ++lgr->next_link_id; |
| 723 | if (!link_id) /* skip zero as link_id */ |
| 724 | link_id = ++lgr->next_link_id; |
| 725 | for (i = 0; i < SMC_LINKS_PER_LGR_MAX; i++) { |
Karsten Graul | d854fcb | 2020-04-29 17:10:43 +0200 | [diff] [blame] | 726 | if (smc_link_usable(&lgr->lnk[i]) && |
Karsten Graul | 026c381 | 2020-04-29 17:10:39 +0200 | [diff] [blame] | 727 | lgr->lnk[i].link_id == link_id) |
Wen Gu | cf4f553 | 2021-11-15 17:45:07 +0800 | [diff] [blame] | 728 | goto again; |
Karsten Graul | 026c381 | 2020-04-29 17:10:39 +0200 | [diff] [blame] | 729 | } |
| 730 | break; |
| 731 | } |
| 732 | return link_id; |
| 733 | } |
| 734 | |
Guvenc Gulce | 6443b2f | 2020-12-01 20:20:41 +0100 | [diff] [blame] | 735 | static void smcr_copy_dev_info_to_link(struct smc_link *link) |
| 736 | { |
| 737 | struct smc_ib_device *smcibdev = link->smcibdev; |
| 738 | |
| 739 | snprintf(link->ibname, sizeof(link->ibname), "%s", |
| 740 | smcibdev->ibdev->name); |
| 741 | link->ndev_ifidx = smcibdev->ndev_ifidx[link->ibport - 1]; |
| 742 | } |
| 743 | |
Karsten Graul | 336ba09 | 2020-05-03 14:38:40 +0200 | [diff] [blame] | 744 | int smcr_link_init(struct smc_link_group *lgr, struct smc_link *lnk, |
| 745 | u8 link_idx, struct smc_init_info *ini) |
Karsten Graul | f3c1ded | 2020-04-29 17:10:38 +0200 | [diff] [blame] | 746 | { |
Karsten Graul | e49300a | 2021-10-16 11:37:46 +0200 | [diff] [blame] | 747 | struct smc_ib_device *smcibdev; |
Karsten Graul | f3c1ded | 2020-04-29 17:10:38 +0200 | [diff] [blame] | 748 | u8 rndvec[3]; |
| 749 | int rc; |
| 750 | |
Karsten Graul | e49300a | 2021-10-16 11:37:46 +0200 | [diff] [blame] | 751 | if (lgr->smc_version == SMC_V2) { |
| 752 | lnk->smcibdev = ini->smcrv2.ib_dev_v2; |
| 753 | lnk->ibport = ini->smcrv2.ib_port_v2; |
| 754 | } else { |
| 755 | lnk->smcibdev = ini->ib_dev; |
| 756 | lnk->ibport = ini->ib_port; |
| 757 | } |
| 758 | get_device(&lnk->smcibdev->ibdev->dev); |
| 759 | atomic_inc(&lnk->smcibdev->lnk_cnt); |
Wen Gu | 20c9398 | 2022-01-13 16:36:42 +0800 | [diff] [blame] | 760 | refcount_set(&lnk->refcnt, 1); /* link refcnt is set to 1 */ |
| 761 | lnk->clearing = 0; |
Karsten Graul | e49300a | 2021-10-16 11:37:46 +0200 | [diff] [blame] | 762 | lnk->path_mtu = lnk->smcibdev->pattr[lnk->ibport - 1].active_mtu; |
Karsten Graul | 026c381 | 2020-04-29 17:10:39 +0200 | [diff] [blame] | 763 | lnk->link_id = smcr_next_link_id(lgr); |
Karsten Graul | 387707f | 2020-04-29 17:10:40 +0200 | [diff] [blame] | 764 | lnk->lgr = lgr; |
Wen Gu | 61f434b | 2022-01-13 16:36:40 +0800 | [diff] [blame] | 765 | smc_lgr_hold(lgr); /* lgr_put in smcr_link_clear() */ |
Karsten Graul | 026c381 | 2020-04-29 17:10:39 +0200 | [diff] [blame] | 766 | lnk->link_idx = link_idx; |
Yacan Liu | e9b1a4f | 2022-09-06 21:01:39 +0800 | [diff] [blame] | 767 | lnk->wr_rx_id_compl = 0; |
Guvenc Gulce | ddc9928 | 2020-12-01 20:20:39 +0100 | [diff] [blame] | 768 | smc_ibdev_cnt_inc(lnk); |
Guvenc Gulce | 6443b2f | 2020-12-01 20:20:41 +0100 | [diff] [blame] | 769 | smcr_copy_dev_info_to_link(lnk); |
Guvenc Gulce | 07d5158 | 2020-12-01 20:20:38 +0100 | [diff] [blame] | 770 | atomic_set(&lnk->conn_cnt, 0); |
Karsten Graul | 45fa8da | 2020-05-04 14:18:47 +0200 | [diff] [blame] | 771 | smc_llc_link_set_uid(lnk); |
Karsten Graul | 541afa1 | 2020-05-01 12:48:08 +0200 | [diff] [blame] | 772 | INIT_WORK(&lnk->link_down_wrk, smc_link_down_work); |
Karsten Graul | e49300a | 2021-10-16 11:37:46 +0200 | [diff] [blame] | 773 | if (!lnk->smcibdev->initialized) { |
| 774 | rc = (int)smc_ib_setup_per_ibdev(lnk->smcibdev); |
Karsten Graul | f3c1ded | 2020-04-29 17:10:38 +0200 | [diff] [blame] | 775 | if (rc) |
| 776 | goto out; |
| 777 | } |
| 778 | get_random_bytes(rndvec, sizeof(rndvec)); |
| 779 | lnk->psn_initial = rndvec[0] + (rndvec[1] << 8) + |
| 780 | (rndvec[2] << 16); |
| 781 | rc = smc_ib_determine_gid(lnk->smcibdev, lnk->ibport, |
Karsten Graul | 24fb681 | 2021-10-16 11:37:48 +0200 | [diff] [blame] | 782 | ini->vlan_id, lnk->gid, &lnk->sgid_index, |
| 783 | lgr->smc_version == SMC_V2 ? |
| 784 | &ini->smcrv2 : NULL); |
Karsten Graul | f3c1ded | 2020-04-29 17:10:38 +0200 | [diff] [blame] | 785 | if (rc) |
| 786 | goto out; |
| 787 | rc = smc_llc_link_init(lnk); |
| 788 | if (rc) |
| 789 | goto out; |
| 790 | rc = smc_wr_alloc_link_mem(lnk); |
| 791 | if (rc) |
| 792 | goto clear_llc_lnk; |
| 793 | rc = smc_ib_create_protection_domain(lnk); |
| 794 | if (rc) |
| 795 | goto free_link_mem; |
| 796 | rc = smc_ib_create_queue_pair(lnk); |
| 797 | if (rc) |
| 798 | goto dealloc_pd; |
| 799 | rc = smc_wr_create_link(lnk); |
| 800 | if (rc) |
| 801 | goto destroy_qp; |
Karsten Graul | 741a49a | 2020-07-18 15:06:16 +0200 | [diff] [blame] | 802 | lnk->state = SMC_LNK_ACTIVATING; |
Karsten Graul | f3c1ded | 2020-04-29 17:10:38 +0200 | [diff] [blame] | 803 | return 0; |
| 804 | |
| 805 | destroy_qp: |
| 806 | smc_ib_destroy_queue_pair(lnk); |
| 807 | dealloc_pd: |
| 808 | smc_ib_dealloc_protection_domain(lnk); |
| 809 | free_link_mem: |
| 810 | smc_wr_free_link_mem(lnk); |
| 811 | clear_llc_lnk: |
Karsten Graul | 0a99be4 | 2020-05-05 15:01:20 +0200 | [diff] [blame] | 812 | smc_llc_link_clear(lnk, false); |
Karsten Graul | f3c1ded | 2020-04-29 17:10:38 +0200 | [diff] [blame] | 813 | out: |
Guvenc Gulce | ddc9928 | 2020-12-01 20:20:39 +0100 | [diff] [blame] | 814 | smc_ibdev_cnt_dec(lnk); |
Karsten Graul | e49300a | 2021-10-16 11:37:46 +0200 | [diff] [blame] | 815 | put_device(&lnk->smcibdev->ibdev->dev); |
| 816 | smcibdev = lnk->smcibdev; |
Karsten Graul | f3c1ded | 2020-04-29 17:10:38 +0200 | [diff] [blame] | 817 | memset(lnk, 0, sizeof(struct smc_link)); |
Karsten Graul | d854fcb | 2020-04-29 17:10:43 +0200 | [diff] [blame] | 818 | lnk->state = SMC_LNK_UNUSED; |
Karsten Graul | e49300a | 2021-10-16 11:37:46 +0200 | [diff] [blame] | 819 | if (!atomic_dec_return(&smcibdev->lnk_cnt)) |
| 820 | wake_up(&smcibdev->lnks_deleted); |
Wen Gu | 61f434b | 2022-01-13 16:36:40 +0800 | [diff] [blame] | 821 | smc_lgr_put(lgr); /* lgr_hold above */ |
Karsten Graul | f3c1ded | 2020-04-29 17:10:38 +0200 | [diff] [blame] | 822 | return rc; |
| 823 | } |
| 824 | |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 825 | /* create a new SMC link group */ |
Karsten Graul | bc36d2f | 2019-04-12 12:57:26 +0200 | [diff] [blame] | 826 | static int smc_lgr_create(struct smc_sock *smc, struct smc_init_info *ini) |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 827 | { |
| 828 | struct smc_link_group *lgr; |
Ursula Braun | a2351c5 | 2019-10-09 10:07:43 +0200 | [diff] [blame] | 829 | struct list_head *lgr_list; |
Stefan Raspl | 8c81ba2 | 2023-01-23 19:17:52 +0100 | [diff] [blame] | 830 | struct smcd_dev *smcd; |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 831 | struct smc_link *lnk; |
Ursula Braun | a0a62ee | 2019-10-09 10:07:44 +0200 | [diff] [blame] | 832 | spinlock_t *lgr_lock; |
Karsten Graul | 026c381 | 2020-04-29 17:10:39 +0200 | [diff] [blame] | 833 | u8 link_idx; |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 834 | int rc = 0; |
Ursula Braun | cd6851f | 2017-01-09 16:55:18 +0100 | [diff] [blame] | 835 | int i; |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 836 | |
Karsten Graul | bc36d2f | 2019-04-12 12:57:26 +0200 | [diff] [blame] | 837 | if (ini->is_smcd && ini->vlan_id) { |
Ursula Braun | 5c21c4c | 2020-09-26 12:44:29 +0200 | [diff] [blame] | 838 | if (smc_ism_get_vlan(ini->ism_dev[ini->ism_selected], |
| 839 | ini->vlan_id)) { |
Karsten Graul | 7a62725a | 2019-04-12 12:57:30 +0200 | [diff] [blame] | 840 | rc = SMC_CLC_DECL_ISMVLANERR; |
Hans Wippel | c6ba7c9 | 2018-06-28 19:05:07 +0200 | [diff] [blame] | 841 | goto out; |
Karsten Graul | 7a62725a | 2019-04-12 12:57:30 +0200 | [diff] [blame] | 842 | } |
Hans Wippel | c6ba7c9 | 2018-06-28 19:05:07 +0200 | [diff] [blame] | 843 | } |
| 844 | |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 845 | lgr = kzalloc(sizeof(*lgr), GFP_KERNEL); |
| 846 | if (!lgr) { |
Karsten Graul | 7a62725a | 2019-04-12 12:57:30 +0200 | [diff] [blame] | 847 | rc = SMC_CLC_DECL_MEM; |
Ursula Braun | 29ee270 | 2019-10-10 10:16:09 +0200 | [diff] [blame] | 848 | goto ism_put_vlan; |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 849 | } |
Karsten Graul | 22ef473 | 2020-09-10 18:48:29 +0200 | [diff] [blame] | 850 | lgr->tx_wq = alloc_workqueue("smc_tx_wq-%*phN", 0, 0, |
| 851 | SMC_LGR_ID_SIZE, &lgr->id); |
| 852 | if (!lgr->tx_wq) { |
| 853 | rc = -ENOMEM; |
| 854 | goto free_lgr; |
| 855 | } |
Karsten Graul | bc36d2f | 2019-04-12 12:57:26 +0200 | [diff] [blame] | 856 | lgr->is_smcd = ini->is_smcd; |
Karsten Graul | 517c300 | 2018-05-15 17:05:03 +0200 | [diff] [blame] | 857 | lgr->sync_err = 0; |
Ursula Braun | 8e316b9 | 2019-10-21 16:13:11 +0200 | [diff] [blame] | 858 | lgr->terminating = 0; |
Ursula Braun | 8e316b9 | 2019-10-21 16:13:11 +0200 | [diff] [blame] | 859 | lgr->freeing = 0; |
Karsten Graul | bc36d2f | 2019-04-12 12:57:26 +0200 | [diff] [blame] | 860 | lgr->vlan_id = ini->vlan_id; |
Wen Gu | 61f434b | 2022-01-13 16:36:40 +0800 | [diff] [blame] | 861 | refcount_set(&lgr->refcnt, 1); /* set lgr refcnt to 1 */ |
D. Wythe | aff7bfe | 2023-02-02 16:26:42 +0800 | [diff] [blame] | 862 | init_rwsem(&lgr->sndbufs_lock); |
| 863 | init_rwsem(&lgr->rmbs_lock); |
Hans Wippel | c6ba7c9 | 2018-06-28 19:05:07 +0200 | [diff] [blame] | 864 | rwlock_init(&lgr->conns_lock); |
Ursula Braun | cd6851f | 2017-01-09 16:55:18 +0100 | [diff] [blame] | 865 | for (i = 0; i < SMC_RMBE_SIZES; i++) { |
| 866 | INIT_LIST_HEAD(&lgr->sndbufs[i]); |
| 867 | INIT_LIST_HEAD(&lgr->rmbs[i]); |
| 868 | } |
Karsten Graul | 026c381 | 2020-04-29 17:10:39 +0200 | [diff] [blame] | 869 | lgr->next_link_id = 0; |
Hans Wippel | 9fda351 | 2018-05-18 09:34:11 +0200 | [diff] [blame] | 870 | smc_lgr_list.num += SMC_LGR_NUM_INCR; |
| 871 | memcpy(&lgr->id, (u8 *)&smc_lgr_list.num, SMC_LGR_ID_SIZE); |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 872 | INIT_DELAYED_WORK(&lgr->free_work, smc_lgr_free_work); |
Ursula Braun | f528ba2 | 2019-10-21 16:13:14 +0200 | [diff] [blame] | 873 | INIT_WORK(&lgr->terminate_work, smc_lgr_terminate_work); |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 874 | lgr->conns_all = RB_ROOT; |
Karsten Graul | bc36d2f | 2019-04-12 12:57:26 +0200 | [diff] [blame] | 875 | if (ini->is_smcd) { |
Hans Wippel | c6ba7c9 | 2018-06-28 19:05:07 +0200 | [diff] [blame] | 876 | /* SMC-D specific settings */ |
Stefan Raspl | 8c81ba2 | 2023-01-23 19:17:52 +0100 | [diff] [blame] | 877 | smcd = ini->ism_dev[ini->ism_selected]; |
| 878 | get_device(smcd->ops->get_dev(smcd)); |
Ursula Braun | 5c21c4c | 2020-09-26 12:44:29 +0200 | [diff] [blame] | 879 | lgr->peer_gid = ini->ism_peer_gid[ini->ism_selected]; |
| 880 | lgr->smcd = ini->ism_dev[ini->ism_selected]; |
| 881 | lgr_list = &ini->ism_dev[ini->ism_selected]->lgr_list; |
Ursula Braun | a0a62ee | 2019-10-09 10:07:44 +0200 | [diff] [blame] | 882 | lgr_lock = &lgr->smcd->lgr_lock; |
Ursula Braun | b81a5eb | 2020-09-26 12:44:31 +0200 | [diff] [blame] | 883 | lgr->smc_version = ini->smcd_version; |
Ursula Braun | 50c6b20 | 2019-11-14 13:02:40 +0100 | [diff] [blame] | 884 | lgr->peer_shutdown = 0; |
Ursula Braun | 5c21c4c | 2020-09-26 12:44:29 +0200 | [diff] [blame] | 885 | atomic_inc(&ini->ism_dev[ini->ism_selected]->lgr_cnt); |
Hans Wippel | c6ba7c9 | 2018-06-28 19:05:07 +0200 | [diff] [blame] | 886 | } else { |
| 887 | /* SMC-R specific settings */ |
Karsten Graul | e49300a | 2021-10-16 11:37:46 +0200 | [diff] [blame] | 888 | struct smc_ib_device *ibdev; |
| 889 | int ibport; |
| 890 | |
Hans Wippel | c6ba7c9 | 2018-06-28 19:05:07 +0200 | [diff] [blame] | 891 | lgr->role = smc->listen_smc ? SMC_SERV : SMC_CLNT; |
Karsten Graul | e49300a | 2021-10-16 11:37:46 +0200 | [diff] [blame] | 892 | lgr->smc_version = ini->smcr_version; |
| 893 | memcpy(lgr->peer_systemid, ini->peer_systemid, |
Karsten Graul | bc36d2f | 2019-04-12 12:57:26 +0200 | [diff] [blame] | 894 | SMC_SYSTEMID_LEN); |
Karsten Graul | e49300a | 2021-10-16 11:37:46 +0200 | [diff] [blame] | 895 | if (lgr->smc_version == SMC_V2) { |
| 896 | ibdev = ini->smcrv2.ib_dev_v2; |
| 897 | ibport = ini->smcrv2.ib_port_v2; |
| 898 | lgr->saddr = ini->smcrv2.saddr; |
| 899 | lgr->uses_gateway = ini->smcrv2.uses_gateway; |
| 900 | memcpy(lgr->nexthop_mac, ini->smcrv2.nexthop_mac, |
| 901 | ETH_ALEN); |
Guangguan Wang | 7f0620b | 2023-08-17 21:20:30 +0800 | [diff] [blame] | 902 | lgr->max_conns = ini->max_conns; |
Guangguan Wang | 69b888e | 2023-08-17 21:20:31 +0800 | [diff] [blame] | 903 | lgr->max_links = ini->max_links; |
Karsten Graul | e49300a | 2021-10-16 11:37:46 +0200 | [diff] [blame] | 904 | } else { |
| 905 | ibdev = ini->ib_dev; |
| 906 | ibport = ini->ib_port; |
Guangguan Wang | 7f0620b | 2023-08-17 21:20:30 +0800 | [diff] [blame] | 907 | lgr->max_conns = SMC_CONN_PER_LGR_MAX; |
Guangguan Wang | 69b888e | 2023-08-17 21:20:31 +0800 | [diff] [blame] | 908 | lgr->max_links = SMC_LINKS_ADD_LNK_MAX; |
Karsten Graul | e49300a | 2021-10-16 11:37:46 +0200 | [diff] [blame] | 909 | } |
| 910 | memcpy(lgr->pnet_id, ibdev->pnetid[ibport - 1], |
Karsten Graul | 35dcf7e | 2020-05-01 12:48:06 +0200 | [diff] [blame] | 911 | SMC_MAX_PNETID_LEN); |
Dan Carpenter | bdee15e | 2022-10-14 12:34:36 +0300 | [diff] [blame] | 912 | rc = smc_wr_alloc_lgr_mem(lgr); |
| 913 | if (rc) |
Karsten Graul | 8799e31 | 2021-10-16 11:37:49 +0200 | [diff] [blame] | 914 | goto free_wq; |
Karsten Graul | 00a049c | 2020-04-29 17:10:49 +0200 | [diff] [blame] | 915 | smc_llc_lgr_init(lgr, smc); |
| 916 | |
Karsten Graul | 026c381 | 2020-04-29 17:10:39 +0200 | [diff] [blame] | 917 | link_idx = SMC_SINGLE_LINK; |
| 918 | lnk = &lgr->lnk[link_idx]; |
| 919 | rc = smcr_link_init(lgr, lnk, link_idx, ini); |
Karsten Graul | 8799e31 | 2021-10-16 11:37:49 +0200 | [diff] [blame] | 920 | if (rc) { |
| 921 | smc_wr_free_lgr_mem(lgr); |
Karsten Graul | 22ef473 | 2020-09-10 18:48:29 +0200 | [diff] [blame] | 922 | goto free_wq; |
Karsten Graul | 8799e31 | 2021-10-16 11:37:49 +0200 | [diff] [blame] | 923 | } |
Tony Lu | 0237a3a | 2021-12-28 21:06:09 +0800 | [diff] [blame] | 924 | lgr->net = smc_ib_net(lnk->smcibdev); |
Ursula Braun | a2351c5 | 2019-10-09 10:07:43 +0200 | [diff] [blame] | 925 | lgr_list = &smc_lgr_list.list; |
Ursula Braun | a0a62ee | 2019-10-09 10:07:44 +0200 | [diff] [blame] | 926 | lgr_lock = &smc_lgr_list.lock; |
Wen Gu | b984f37 | 2022-07-14 17:44:03 +0800 | [diff] [blame] | 927 | lgr->buf_type = lgr->net->smc.sysctl_smcr_buf_type; |
Ursula Braun | 6dabd40 | 2019-11-16 17:47:29 +0100 | [diff] [blame] | 928 | atomic_inc(&lgr_cnt); |
Hans Wippel | c6ba7c9 | 2018-06-28 19:05:07 +0200 | [diff] [blame] | 929 | } |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 930 | smc->conn.lgr = lgr; |
Ursula Braun | a0a62ee | 2019-10-09 10:07:44 +0200 | [diff] [blame] | 931 | spin_lock_bh(lgr_lock); |
Karsten Graul | a9e4450 | 2020-07-20 16:24:29 +0200 | [diff] [blame] | 932 | list_add_tail(&lgr->list, lgr_list); |
Ursula Braun | a0a62ee | 2019-10-09 10:07:44 +0200 | [diff] [blame] | 933 | spin_unlock_bh(lgr_lock); |
Ursula Braun | f38ba179 | 2017-01-09 16:55:19 +0100 | [diff] [blame] | 934 | return 0; |
| 935 | |
Karsten Graul | 22ef473 | 2020-09-10 18:48:29 +0200 | [diff] [blame] | 936 | free_wq: |
| 937 | destroy_workqueue(lgr->tx_wq); |
Ursula Braun | f38ba179 | 2017-01-09 16:55:19 +0100 | [diff] [blame] | 938 | free_lgr: |
| 939 | kfree(lgr); |
Ursula Braun | 29ee270 | 2019-10-10 10:16:09 +0200 | [diff] [blame] | 940 | ism_put_vlan: |
| 941 | if (ini->is_smcd && ini->vlan_id) |
Ursula Braun | 5c21c4c | 2020-09-26 12:44:29 +0200 | [diff] [blame] | 942 | smc_ism_put_vlan(ini->ism_dev[ini->ism_selected], ini->vlan_id); |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 943 | out: |
Karsten Graul | 7a62725a | 2019-04-12 12:57:30 +0200 | [diff] [blame] | 944 | if (rc < 0) { |
| 945 | if (rc == -ENOMEM) |
| 946 | rc = SMC_CLC_DECL_MEM; |
| 947 | else |
| 948 | rc = SMC_CLC_DECL_INTERR; |
| 949 | } |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 950 | return rc; |
| 951 | } |
| 952 | |
Karsten Graul | c6f02eb | 2020-05-04 14:18:38 +0200 | [diff] [blame] | 953 | static int smc_write_space(struct smc_connection *conn) |
| 954 | { |
| 955 | int buffer_len = conn->peer_rmbe_size; |
| 956 | union smc_host_cursor prod; |
| 957 | union smc_host_cursor cons; |
| 958 | int space; |
| 959 | |
| 960 | smc_curs_copy(&prod, &conn->local_tx_ctrl.prod, conn); |
| 961 | smc_curs_copy(&cons, &conn->local_rx_ctrl.cons, conn); |
| 962 | /* determine rx_buf space */ |
| 963 | space = buffer_len - smc_curs_diff(buffer_len, &cons, &prod); |
| 964 | return space; |
| 965 | } |
| 966 | |
Karsten Graul | b8ded9d | 2020-05-30 16:42:37 +0200 | [diff] [blame] | 967 | static int smc_switch_cursor(struct smc_sock *smc, struct smc_cdc_tx_pend *pend, |
| 968 | struct smc_wr_buf *wr_buf) |
Karsten Graul | c6f02eb | 2020-05-04 14:18:38 +0200 | [diff] [blame] | 969 | { |
| 970 | struct smc_connection *conn = &smc->conn; |
| 971 | union smc_host_cursor cons, fin; |
| 972 | int rc = 0; |
| 973 | int diff; |
| 974 | |
| 975 | smc_curs_copy(&conn->tx_curs_sent, &conn->tx_curs_fin, conn); |
| 976 | smc_curs_copy(&fin, &conn->local_tx_ctrl_fin, conn); |
| 977 | /* set prod cursor to old state, enforce tx_rdma_writes() */ |
| 978 | smc_curs_copy(&conn->local_tx_ctrl.prod, &fin, conn); |
| 979 | smc_curs_copy(&cons, &conn->local_rx_ctrl.cons, conn); |
| 980 | |
| 981 | if (smc_curs_comp(conn->peer_rmbe_size, &cons, &fin) < 0) { |
| 982 | /* cons cursor advanced more than fin, and prod was set |
| 983 | * fin above, so now prod is smaller than cons. Fix that. |
| 984 | */ |
| 985 | diff = smc_curs_diff(conn->peer_rmbe_size, &fin, &cons); |
| 986 | smc_curs_add(conn->sndbuf_desc->len, |
| 987 | &conn->tx_curs_sent, diff); |
| 988 | smc_curs_add(conn->sndbuf_desc->len, |
| 989 | &conn->tx_curs_fin, diff); |
| 990 | |
| 991 | smp_mb__before_atomic(); |
| 992 | atomic_add(diff, &conn->sndbuf_space); |
| 993 | smp_mb__after_atomic(); |
| 994 | |
| 995 | smc_curs_add(conn->peer_rmbe_size, |
| 996 | &conn->local_tx_ctrl.prod, diff); |
| 997 | smc_curs_add(conn->peer_rmbe_size, |
| 998 | &conn->local_tx_ctrl_fin, diff); |
| 999 | } |
| 1000 | /* recalculate, value is used by tx_rdma_writes() */ |
| 1001 | atomic_set(&smc->conn.peer_rmbe_space, smc_write_space(conn)); |
| 1002 | |
| 1003 | if (smc->sk.sk_state != SMC_INIT && |
| 1004 | smc->sk.sk_state != SMC_CLOSED) { |
Karsten Graul | b8ded9d | 2020-05-30 16:42:37 +0200 | [diff] [blame] | 1005 | rc = smcr_cdc_msg_send_validation(conn, pend, wr_buf); |
Karsten Graul | c6f02eb | 2020-05-04 14:18:38 +0200 | [diff] [blame] | 1006 | if (!rc) { |
Karsten Graul | 22ef473 | 2020-09-10 18:48:29 +0200 | [diff] [blame] | 1007 | queue_delayed_work(conn->lgr->tx_wq, &conn->tx_work, 0); |
Karsten Graul | c6f02eb | 2020-05-04 14:18:38 +0200 | [diff] [blame] | 1008 | smc->sk.sk_data_ready(&smc->sk); |
| 1009 | } |
Karsten Graul | b8ded9d | 2020-05-30 16:42:37 +0200 | [diff] [blame] | 1010 | } else { |
| 1011 | smc_wr_tx_put_slot(conn->lnk, |
| 1012 | (struct smc_wr_tx_pend_priv *)pend); |
Karsten Graul | c6f02eb | 2020-05-04 14:18:38 +0200 | [diff] [blame] | 1013 | } |
| 1014 | return rc; |
| 1015 | } |
| 1016 | |
Guvenc Gulce | 64513d2 | 2021-08-09 11:05:57 +0200 | [diff] [blame] | 1017 | void smc_switch_link_and_count(struct smc_connection *conn, |
| 1018 | struct smc_link *to_lnk) |
Guvenc Gulce | 07d5158 | 2020-12-01 20:20:38 +0100 | [diff] [blame] | 1019 | { |
| 1020 | atomic_dec(&conn->lnk->conn_cnt); |
Wen Gu | 20c9398 | 2022-01-13 16:36:42 +0800 | [diff] [blame] | 1021 | /* link_hold in smc_conn_create() */ |
| 1022 | smcr_link_put(conn->lnk); |
Guvenc Gulce | 07d5158 | 2020-12-01 20:20:38 +0100 | [diff] [blame] | 1023 | conn->lnk = to_lnk; |
| 1024 | atomic_inc(&conn->lnk->conn_cnt); |
Wen Gu | 20c9398 | 2022-01-13 16:36:42 +0800 | [diff] [blame] | 1025 | /* link_put in smc_conn_free() */ |
| 1026 | smcr_link_hold(conn->lnk); |
Guvenc Gulce | 07d5158 | 2020-12-01 20:20:38 +0100 | [diff] [blame] | 1027 | } |
| 1028 | |
Karsten Graul | c6f02eb | 2020-05-04 14:18:38 +0200 | [diff] [blame] | 1029 | struct smc_link *smc_switch_conns(struct smc_link_group *lgr, |
| 1030 | struct smc_link *from_lnk, bool is_dev_err) |
| 1031 | { |
| 1032 | struct smc_link *to_lnk = NULL; |
Karsten Graul | b8ded9d | 2020-05-30 16:42:37 +0200 | [diff] [blame] | 1033 | struct smc_cdc_tx_pend *pend; |
Karsten Graul | c6f02eb | 2020-05-04 14:18:38 +0200 | [diff] [blame] | 1034 | struct smc_connection *conn; |
Karsten Graul | b8ded9d | 2020-05-30 16:42:37 +0200 | [diff] [blame] | 1035 | struct smc_wr_buf *wr_buf; |
Karsten Graul | c6f02eb | 2020-05-04 14:18:38 +0200 | [diff] [blame] | 1036 | struct smc_sock *smc; |
| 1037 | struct rb_node *node; |
| 1038 | int i, rc = 0; |
| 1039 | |
| 1040 | /* link is inactive, wake up tx waiters */ |
| 1041 | smc_wr_wakeup_tx_wait(from_lnk); |
| 1042 | |
| 1043 | for (i = 0; i < SMC_LINKS_PER_LGR_MAX; i++) { |
Karsten Graul | 741a49a | 2020-07-18 15:06:16 +0200 | [diff] [blame] | 1044 | if (!smc_link_active(&lgr->lnk[i]) || i == from_lnk->link_idx) |
Karsten Graul | c6f02eb | 2020-05-04 14:18:38 +0200 | [diff] [blame] | 1045 | continue; |
| 1046 | if (is_dev_err && from_lnk->smcibdev == lgr->lnk[i].smcibdev && |
| 1047 | from_lnk->ibport == lgr->lnk[i].ibport) { |
| 1048 | continue; |
| 1049 | } |
| 1050 | to_lnk = &lgr->lnk[i]; |
| 1051 | break; |
| 1052 | } |
Karsten Graul | 95f7f3e | 2021-10-07 16:14:40 +0200 | [diff] [blame] | 1053 | if (!to_lnk || !smc_wr_tx_link_hold(to_lnk)) { |
Karsten Graul | c6f02eb | 2020-05-04 14:18:38 +0200 | [diff] [blame] | 1054 | smc_lgr_terminate_sched(lgr); |
| 1055 | return NULL; |
| 1056 | } |
| 1057 | again: |
| 1058 | read_lock_bh(&lgr->conns_lock); |
| 1059 | for (node = rb_first(&lgr->conns_all); node; node = rb_next(node)) { |
| 1060 | conn = rb_entry(node, struct smc_connection, alert_node); |
| 1061 | if (conn->lnk != from_lnk) |
| 1062 | continue; |
| 1063 | smc = container_of(conn, struct smc_sock, conn); |
| 1064 | /* conn->lnk not yet set in SMC_INIT state */ |
| 1065 | if (smc->sk.sk_state == SMC_INIT) |
| 1066 | continue; |
| 1067 | if (smc->sk.sk_state == SMC_CLOSED || |
| 1068 | smc->sk.sk_state == SMC_PEERCLOSEWAIT1 || |
| 1069 | smc->sk.sk_state == SMC_PEERCLOSEWAIT2 || |
| 1070 | smc->sk.sk_state == SMC_APPFINCLOSEWAIT || |
| 1071 | smc->sk.sk_state == SMC_APPCLOSEWAIT1 || |
| 1072 | smc->sk.sk_state == SMC_APPCLOSEWAIT2 || |
| 1073 | smc->sk.sk_state == SMC_PEERFINCLOSEWAIT || |
| 1074 | smc->sk.sk_state == SMC_PEERABORTWAIT || |
| 1075 | smc->sk.sk_state == SMC_PROCESSABORT) { |
| 1076 | spin_lock_bh(&conn->send_lock); |
Guvenc Gulce | 07d5158 | 2020-12-01 20:20:38 +0100 | [diff] [blame] | 1077 | smc_switch_link_and_count(conn, to_lnk); |
Karsten Graul | c6f02eb | 2020-05-04 14:18:38 +0200 | [diff] [blame] | 1078 | spin_unlock_bh(&conn->send_lock); |
| 1079 | continue; |
| 1080 | } |
| 1081 | sock_hold(&smc->sk); |
| 1082 | read_unlock_bh(&lgr->conns_lock); |
Karsten Graul | b8ded9d | 2020-05-30 16:42:37 +0200 | [diff] [blame] | 1083 | /* pre-fetch buffer outside of send_lock, might sleep */ |
| 1084 | rc = smc_cdc_get_free_slot(conn, to_lnk, &wr_buf, NULL, &pend); |
Karsten Graul | 95f7f3e | 2021-10-07 16:14:40 +0200 | [diff] [blame] | 1085 | if (rc) |
| 1086 | goto err_out; |
Karsten Graul | c6f02eb | 2020-05-04 14:18:38 +0200 | [diff] [blame] | 1087 | /* avoid race with smcr_tx_sndbuf_nonempty() */ |
| 1088 | spin_lock_bh(&conn->send_lock); |
Guvenc Gulce | 07d5158 | 2020-12-01 20:20:38 +0100 | [diff] [blame] | 1089 | smc_switch_link_and_count(conn, to_lnk); |
Karsten Graul | b8ded9d | 2020-05-30 16:42:37 +0200 | [diff] [blame] | 1090 | rc = smc_switch_cursor(smc, pend, wr_buf); |
Karsten Graul | c6f02eb | 2020-05-04 14:18:38 +0200 | [diff] [blame] | 1091 | spin_unlock_bh(&conn->send_lock); |
| 1092 | sock_put(&smc->sk); |
Karsten Graul | 95f7f3e | 2021-10-07 16:14:40 +0200 | [diff] [blame] | 1093 | if (rc) |
| 1094 | goto err_out; |
Karsten Graul | c6f02eb | 2020-05-04 14:18:38 +0200 | [diff] [blame] | 1095 | goto again; |
| 1096 | } |
| 1097 | read_unlock_bh(&lgr->conns_lock); |
Karsten Graul | 95f7f3e | 2021-10-07 16:14:40 +0200 | [diff] [blame] | 1098 | smc_wr_tx_link_put(to_lnk); |
Karsten Graul | c6f02eb | 2020-05-04 14:18:38 +0200 | [diff] [blame] | 1099 | return to_lnk; |
Karsten Graul | 95f7f3e | 2021-10-07 16:14:40 +0200 | [diff] [blame] | 1100 | |
| 1101 | err_out: |
| 1102 | smcr_link_down_cond_sched(to_lnk); |
| 1103 | smc_wr_tx_link_put(to_lnk); |
| 1104 | return NULL; |
Karsten Graul | c6f02eb | 2020-05-04 14:18:38 +0200 | [diff] [blame] | 1105 | } |
| 1106 | |
Wen Gu | b8d1994 | 2022-07-14 17:44:04 +0800 | [diff] [blame] | 1107 | static void smcr_buf_unuse(struct smc_buf_desc *buf_desc, bool is_rmb, |
Karsten Graul | 6d74c3a | 2020-04-30 15:55:45 +0200 | [diff] [blame] | 1108 | struct smc_link_group *lgr) |
Karsten Graul | b924754 | 2020-04-29 17:10:41 +0200 | [diff] [blame] | 1109 | { |
D. Wythe | aff7bfe | 2023-02-02 16:26:42 +0800 | [diff] [blame] | 1110 | struct rw_semaphore *lock; /* lock buffer list */ |
Karsten Graul | d550066 | 2020-05-01 12:48:05 +0200 | [diff] [blame] | 1111 | int rc; |
| 1112 | |
Wen Gu | b8d1994 | 2022-07-14 17:44:04 +0800 | [diff] [blame] | 1113 | if (is_rmb && buf_desc->is_conf_rkey && !list_empty(&lgr->list)) { |
Karsten Graul | b924754 | 2020-04-29 17:10:41 +0200 | [diff] [blame] | 1114 | /* unregister rmb with peer */ |
Karsten Graul | d550066 | 2020-05-01 12:48:05 +0200 | [diff] [blame] | 1115 | rc = smc_llc_flow_initiate(lgr, SMC_LLC_FLOW_RKEY); |
| 1116 | if (!rc) { |
| 1117 | /* protect against smc_llc_cli_rkey_exchange() */ |
D. Wythe | f642101 | 2023-02-02 16:26:40 +0800 | [diff] [blame] | 1118 | down_read(&lgr->llc_conf_mutex); |
Wen Gu | b8d1994 | 2022-07-14 17:44:04 +0800 | [diff] [blame] | 1119 | smc_llc_do_delete_rkey(lgr, buf_desc); |
| 1120 | buf_desc->is_conf_rkey = false; |
D. Wythe | f642101 | 2023-02-02 16:26:40 +0800 | [diff] [blame] | 1121 | up_read(&lgr->llc_conf_mutex); |
Karsten Graul | d550066 | 2020-05-01 12:48:05 +0200 | [diff] [blame] | 1122 | smc_llc_flow_stop(lgr, &lgr->llc_flow_lcl); |
| 1123 | } |
Karsten Graul | b924754 | 2020-04-29 17:10:41 +0200 | [diff] [blame] | 1124 | } |
Karsten Graul | d550066 | 2020-05-01 12:48:05 +0200 | [diff] [blame] | 1125 | |
Wen Gu | b8d1994 | 2022-07-14 17:44:04 +0800 | [diff] [blame] | 1126 | if (buf_desc->is_reg_err) { |
Karsten Graul | b924754 | 2020-04-29 17:10:41 +0200 | [diff] [blame] | 1127 | /* buf registration failed, reuse not possible */ |
Wen Gu | b8d1994 | 2022-07-14 17:44:04 +0800 | [diff] [blame] | 1128 | lock = is_rmb ? &lgr->rmbs_lock : |
| 1129 | &lgr->sndbufs_lock; |
D. Wythe | aff7bfe | 2023-02-02 16:26:42 +0800 | [diff] [blame] | 1130 | down_write(lock); |
Wen Gu | b8d1994 | 2022-07-14 17:44:04 +0800 | [diff] [blame] | 1131 | list_del(&buf_desc->list); |
D. Wythe | aff7bfe | 2023-02-02 16:26:42 +0800 | [diff] [blame] | 1132 | up_write(lock); |
Karsten Graul | b924754 | 2020-04-29 17:10:41 +0200 | [diff] [blame] | 1133 | |
Wen Gu | b8d1994 | 2022-07-14 17:44:04 +0800 | [diff] [blame] | 1134 | smc_buf_free(lgr, is_rmb, buf_desc); |
Karsten Graul | b924754 | 2020-04-29 17:10:41 +0200 | [diff] [blame] | 1135 | } else { |
D. Wythe | 475f9ff | 2023-02-16 14:39:05 +0800 | [diff] [blame] | 1136 | /* memzero_explicit provides potential memory barrier semantics */ |
| 1137 | memzero_explicit(buf_desc->cpu_addr, buf_desc->len); |
| 1138 | WRITE_ONCE(buf_desc->used, 0); |
Karsten Graul | b924754 | 2020-04-29 17:10:41 +0200 | [diff] [blame] | 1139 | } |
| 1140 | } |
| 1141 | |
Karsten Graul | fb692ec | 2018-10-25 13:25:28 +0200 | [diff] [blame] | 1142 | static void smc_buf_unuse(struct smc_connection *conn, |
| 1143 | struct smc_link_group *lgr) |
Ursula Braun | cd6851f | 2017-01-09 16:55:18 +0100 | [diff] [blame] | 1144 | { |
Tony Lu | 1c55269 | 2021-12-03 12:33:31 +0100 | [diff] [blame] | 1145 | if (conn->sndbuf_desc) { |
Wen Gu | b8d1994 | 2022-07-14 17:44:04 +0800 | [diff] [blame] | 1146 | if (!lgr->is_smcd && conn->sndbuf_desc->is_vm) { |
| 1147 | smcr_buf_unuse(conn->sndbuf_desc, false, lgr); |
| 1148 | } else { |
D. Wythe | 475f9ff | 2023-02-16 14:39:05 +0800 | [diff] [blame] | 1149 | memzero_explicit(conn->sndbuf_desc->cpu_addr, conn->sndbuf_desc->len); |
| 1150 | WRITE_ONCE(conn->sndbuf_desc->used, 0); |
Wen Gu | b8d1994 | 2022-07-14 17:44:04 +0800 | [diff] [blame] | 1151 | } |
Tony Lu | 1c55269 | 2021-12-03 12:33:31 +0100 | [diff] [blame] | 1152 | } |
Wen Gu | b8d1994 | 2022-07-14 17:44:04 +0800 | [diff] [blame] | 1153 | if (conn->rmb_desc) { |
| 1154 | if (!lgr->is_smcd) { |
| 1155 | smcr_buf_unuse(conn->rmb_desc, true, lgr); |
| 1156 | } else { |
D. Wythe | 475f9ff | 2023-02-16 14:39:05 +0800 | [diff] [blame] | 1157 | memzero_explicit(conn->rmb_desc->cpu_addr, |
| 1158 | conn->rmb_desc->len + sizeof(struct smcd_cdc_msg)); |
| 1159 | WRITE_ONCE(conn->rmb_desc->used, 0); |
Wen Gu | b8d1994 | 2022-07-14 17:44:04 +0800 | [diff] [blame] | 1160 | } |
Tony Lu | 1c55269 | 2021-12-03 12:33:31 +0100 | [diff] [blame] | 1161 | } |
Ursula Braun | cd6851f | 2017-01-09 16:55:18 +0100 | [diff] [blame] | 1162 | } |
| 1163 | |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 1164 | /* remove a finished connection from its link group */ |
| 1165 | void smc_conn_free(struct smc_connection *conn) |
| 1166 | { |
Karsten Graul | fb692ec | 2018-10-25 13:25:28 +0200 | [diff] [blame] | 1167 | struct smc_link_group *lgr = conn->lgr; |
| 1168 | |
Wen Gu | 61f434b | 2022-01-13 16:36:40 +0800 | [diff] [blame] | 1169 | if (!lgr || conn->freed) |
| 1170 | /* Connection has never been registered in a |
| 1171 | * link group, or has already been freed. |
| 1172 | */ |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 1173 | return; |
Wen Gu | 61f434b | 2022-01-13 16:36:40 +0800 | [diff] [blame] | 1174 | |
| 1175 | conn->freed = 1; |
Wen Gu | ea89c6c | 2022-01-13 16:36:41 +0800 | [diff] [blame] | 1176 | if (!smc_conn_lgr_valid(conn)) |
Wen Gu | 61f434b | 2022-01-13 16:36:40 +0800 | [diff] [blame] | 1177 | /* Connection has already unregistered from |
| 1178 | * link group. |
| 1179 | */ |
| 1180 | goto lgr_put; |
| 1181 | |
Karsten Graul | fb692ec | 2018-10-25 13:25:28 +0200 | [diff] [blame] | 1182 | if (lgr->is_smcd) { |
Ursula Braun | 42bfba9 | 2019-11-14 13:02:41 +0100 | [diff] [blame] | 1183 | if (!list_empty(&lgr->list)) |
| 1184 | smc_ism_unset_conn(conn); |
Hans Wippel | be244f2 | 2018-06-28 19:05:10 +0200 | [diff] [blame] | 1185 | tasklet_kill(&conn->rx_tsklet); |
| 1186 | } else { |
Dust Li | 349d431 | 2021-12-28 17:03:25 +0800 | [diff] [blame] | 1187 | smc_cdc_wait_pend_tx_wr(conn); |
Karsten Graul | b286a06 | 2020-05-04 14:18:40 +0200 | [diff] [blame] | 1188 | if (current_work() != &conn->abort_work) |
| 1189 | cancel_work_sync(&conn->abort_work); |
Hans Wippel | be244f2 | 2018-06-28 19:05:10 +0200 | [diff] [blame] | 1190 | } |
Ursula Braun | 2a0674ff | 2019-10-21 16:13:13 +0200 | [diff] [blame] | 1191 | if (!list_empty(&lgr->list)) { |
Ursula Braun | 2a0674ff | 2019-10-21 16:13:13 +0200 | [diff] [blame] | 1192 | smc_buf_unuse(conn, lgr); /* allow buffer reuse */ |
D. Wythe | 0537f0a | 2022-03-02 21:25:11 +0800 | [diff] [blame] | 1193 | smc_lgr_unregister_conn(conn); |
Ursula Braun | 2a0674ff | 2019-10-21 16:13:13 +0200 | [diff] [blame] | 1194 | } |
Karsten Graul | fb692ec | 2018-10-25 13:25:28 +0200 | [diff] [blame] | 1195 | |
| 1196 | if (!lgr->conns_num) |
| 1197 | smc_lgr_schedule_free_work(lgr); |
Wen Gu | 61f434b | 2022-01-13 16:36:40 +0800 | [diff] [blame] | 1198 | lgr_put: |
Wen Gu | 20c9398 | 2022-01-13 16:36:42 +0800 | [diff] [blame] | 1199 | if (!lgr->is_smcd) |
| 1200 | smcr_link_put(conn->lnk); /* link_hold in smc_conn_create() */ |
Wen Gu | 61f434b | 2022-01-13 16:36:40 +0800 | [diff] [blame] | 1201 | smc_lgr_put(lgr); /* lgr_hold in smc_conn_create() */ |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 1202 | } |
| 1203 | |
Karsten Graul | 4a3641c | 2020-05-01 12:48:02 +0200 | [diff] [blame] | 1204 | /* unregister a link from a buf_desc */ |
| 1205 | static void smcr_buf_unmap_link(struct smc_buf_desc *buf_desc, bool is_rmb, |
| 1206 | struct smc_link *lnk) |
| 1207 | { |
Wen Gu | b8d1994 | 2022-07-14 17:44:04 +0800 | [diff] [blame] | 1208 | if (is_rmb || buf_desc->is_vm) |
Karsten Graul | 4a3641c | 2020-05-01 12:48:02 +0200 | [diff] [blame] | 1209 | buf_desc->is_reg_mr[lnk->link_idx] = false; |
| 1210 | if (!buf_desc->is_map_ib[lnk->link_idx]) |
| 1211 | return; |
Wen Gu | b8d1994 | 2022-07-14 17:44:04 +0800 | [diff] [blame] | 1212 | |
| 1213 | if ((is_rmb || buf_desc->is_vm) && |
| 1214 | buf_desc->mr[lnk->link_idx]) { |
| 1215 | smc_ib_put_memory_region(buf_desc->mr[lnk->link_idx]); |
| 1216 | buf_desc->mr[lnk->link_idx] = NULL; |
Karsten Graul | 4a3641c | 2020-05-01 12:48:02 +0200 | [diff] [blame] | 1217 | } |
Wen Gu | b8d1994 | 2022-07-14 17:44:04 +0800 | [diff] [blame] | 1218 | if (is_rmb) |
| 1219 | smc_ib_buf_unmap_sg(lnk, buf_desc, DMA_FROM_DEVICE); |
| 1220 | else |
| 1221 | smc_ib_buf_unmap_sg(lnk, buf_desc, DMA_TO_DEVICE); |
| 1222 | |
Karsten Graul | 4a3641c | 2020-05-01 12:48:02 +0200 | [diff] [blame] | 1223 | sg_free_table(&buf_desc->sgt[lnk->link_idx]); |
| 1224 | buf_desc->is_map_ib[lnk->link_idx] = false; |
| 1225 | } |
| 1226 | |
/* unmap all buffers of lgr for a deleted link.
 * Walks every RMB and sndbuf size class; each list is traversed under
 * its own write-held rwsem so concurrent buffer users are excluded.
 */
static void smcr_buf_unmap_lgr(struct smc_link *lnk)
{
	struct smc_link_group *lgr = lnk->lgr;
	struct smc_buf_desc *buf_desc, *bf;
	int i;

	for (i = 0; i < SMC_RMBE_SIZES; i++) {
		down_write(&lgr->rmbs_lock);
		list_for_each_entry_safe(buf_desc, bf, &lgr->rmbs[i], list)
			smcr_buf_unmap_link(buf_desc, true, lnk);
		up_write(&lgr->rmbs_lock);

		down_write(&lgr->sndbufs_lock);
		list_for_each_entry_safe(buf_desc, bf, &lgr->sndbufs[i],
					 list)
			smcr_buf_unmap_link(buf_desc, false, lnk);
		up_write(&lgr->sndbufs_lock);
	}
}
| 1247 | |
| 1248 | static void smcr_rtoken_clear_link(struct smc_link *lnk) |
| 1249 | { |
| 1250 | struct smc_link_group *lgr = lnk->lgr; |
| 1251 | int i; |
| 1252 | |
| 1253 | for (i = 0; i < SMC_RMBS_PER_LGR_MAX; i++) { |
| 1254 | lgr->rtokens[i][lnk->link_idx].rkey = 0; |
| 1255 | lgr->rtokens[i][lnk->link_idx].dma_addr = 0; |
| 1256 | } |
| 1257 | } |
| 1258 | |
Wen Gu | 20c9398 | 2022-01-13 16:36:42 +0800 | [diff] [blame] | 1259 | static void __smcr_link_clear(struct smc_link *lnk) |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 1260 | { |
Wen Gu | 61f434b | 2022-01-13 16:36:40 +0800 | [diff] [blame] | 1261 | struct smc_link_group *lgr = lnk->lgr; |
Karsten Graul | d854fcb | 2020-04-29 17:10:43 +0200 | [diff] [blame] | 1262 | struct smc_ib_device *smcibdev; |
| 1263 | |
Ursula Braun | f38ba179 | 2017-01-09 16:55:19 +0100 | [diff] [blame] | 1264 | smc_wr_free_link_mem(lnk); |
Guvenc Gulce | ddc9928 | 2020-12-01 20:20:39 +0100 | [diff] [blame] | 1265 | smc_ibdev_cnt_dec(lnk); |
Karsten Graul | f3c1ded | 2020-04-29 17:10:38 +0200 | [diff] [blame] | 1266 | put_device(&lnk->smcibdev->ibdev->dev); |
Karsten Graul | d854fcb | 2020-04-29 17:10:43 +0200 | [diff] [blame] | 1267 | smcibdev = lnk->smcibdev; |
| 1268 | memset(lnk, 0, sizeof(struct smc_link)); |
| 1269 | lnk->state = SMC_LNK_UNUSED; |
| 1270 | if (!atomic_dec_return(&smcibdev->lnk_cnt)) |
| 1271 | wake_up(&smcibdev->lnks_deleted); |
Wen Gu | 61f434b | 2022-01-13 16:36:40 +0800 | [diff] [blame] | 1272 | smc_lgr_put(lgr); /* lgr_hold in smcr_link_init() */ |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 1273 | } |
| 1274 | |
Wen Gu | 20c9398 | 2022-01-13 16:36:42 +0800 | [diff] [blame] | 1275 | /* must be called under lgr->llc_conf_mutex lock */ |
| 1276 | void smcr_link_clear(struct smc_link *lnk, bool log) |
| 1277 | { |
| 1278 | if (!lnk->lgr || lnk->clearing || |
| 1279 | lnk->state == SMC_LNK_UNUSED) |
| 1280 | return; |
| 1281 | lnk->clearing = 1; |
| 1282 | lnk->peer_qpn = 0; |
| 1283 | smc_llc_link_clear(lnk, log); |
| 1284 | smcr_buf_unmap_lgr(lnk); |
| 1285 | smcr_rtoken_clear_link(lnk); |
| 1286 | smc_ib_modify_qp_error(lnk); |
| 1287 | smc_wr_free_link(lnk); |
| 1288 | smc_ib_destroy_queue_pair(lnk); |
| 1289 | smc_ib_dealloc_protection_domain(lnk); |
| 1290 | smcr_link_put(lnk); /* theoretically last link_put */ |
| 1291 | } |
| 1292 | |
/* take a reference on the link; paired with smcr_link_put() */
void smcr_link_hold(struct smc_link *lnk)
{
	refcount_inc(&lnk->refcnt);
}
| 1297 | |
/* drop a link reference; the last put frees the link via
 * __smcr_link_clear()
 */
void smcr_link_put(struct smc_link *lnk)
{
	if (refcount_dec_and_test(&lnk->refcnt))
		__smcr_link_clear(lnk);
}
| 1303 | |
Hans Wippel | c6ba7c9 | 2018-06-28 19:05:07 +0200 | [diff] [blame] | 1304 | static void smcr_buf_free(struct smc_link_group *lgr, bool is_rmb, |
| 1305 | struct smc_buf_desc *buf_desc) |
Ursula Braun | 3e03472 | 2017-07-28 13:56:20 +0200 | [diff] [blame] | 1306 | { |
Karsten Graul | b924754 | 2020-04-29 17:10:41 +0200 | [diff] [blame] | 1307 | int i; |
Hans Wippel | 6511aad | 2018-05-18 09:34:17 +0200 | [diff] [blame] | 1308 | |
Karsten Graul | 4a3641c | 2020-05-01 12:48:02 +0200 | [diff] [blame] | 1309 | for (i = 0; i < SMC_LINKS_PER_LGR_MAX; i++) |
| 1310 | smcr_buf_unmap_link(buf_desc, is_rmb, &lgr->lnk[i]); |
Karsten Graul | 387707f | 2020-04-29 17:10:40 +0200 | [diff] [blame] | 1311 | |
Wen Gu | b8d1994 | 2022-07-14 17:44:04 +0800 | [diff] [blame] | 1312 | if (!buf_desc->is_vm && buf_desc->pages) |
Stefan Raspl | 2ef4f27 | 2018-05-03 18:12:38 +0200 | [diff] [blame] | 1313 | __free_pages(buf_desc->pages, buf_desc->order); |
Wen Gu | b8d1994 | 2022-07-14 17:44:04 +0800 | [diff] [blame] | 1314 | else if (buf_desc->is_vm && buf_desc->cpu_addr) |
| 1315 | vfree(buf_desc->cpu_addr); |
Ursula Braun | 3e03472 | 2017-07-28 13:56:20 +0200 | [diff] [blame] | 1316 | kfree(buf_desc); |
| 1317 | } |
| 1318 | |
Hans Wippel | c6ba7c9 | 2018-06-28 19:05:07 +0200 | [diff] [blame] | 1319 | static void smcd_buf_free(struct smc_link_group *lgr, bool is_dmb, |
| 1320 | struct smc_buf_desc *buf_desc) |
| 1321 | { |
Hans Wippel | be244f2 | 2018-06-28 19:05:10 +0200 | [diff] [blame] | 1322 | if (is_dmb) { |
| 1323 | /* restore original buf len */ |
| 1324 | buf_desc->len += sizeof(struct smcd_cdc_msg); |
Hans Wippel | c6ba7c9 | 2018-06-28 19:05:07 +0200 | [diff] [blame] | 1325 | smc_ism_unregister_dmb(lgr->smcd, buf_desc); |
Hans Wippel | be244f2 | 2018-06-28 19:05:10 +0200 | [diff] [blame] | 1326 | } else { |
Hans Wippel | c6ba7c9 | 2018-06-28 19:05:07 +0200 | [diff] [blame] | 1327 | kfree(buf_desc->cpu_addr); |
Hans Wippel | be244f2 | 2018-06-28 19:05:10 +0200 | [diff] [blame] | 1328 | } |
Hans Wippel | c6ba7c9 | 2018-06-28 19:05:07 +0200 | [diff] [blame] | 1329 | kfree(buf_desc); |
| 1330 | } |
| 1331 | |
| 1332 | static void smc_buf_free(struct smc_link_group *lgr, bool is_rmb, |
| 1333 | struct smc_buf_desc *buf_desc) |
| 1334 | { |
| 1335 | if (lgr->is_smcd) |
| 1336 | smcd_buf_free(lgr, is_rmb, buf_desc); |
| 1337 | else |
| 1338 | smcr_buf_free(lgr, is_rmb, buf_desc); |
| 1339 | } |
| 1340 | |
Ursula Braun | 3e03472 | 2017-07-28 13:56:20 +0200 | [diff] [blame] | 1341 | static void __smc_lgr_free_bufs(struct smc_link_group *lgr, bool is_rmb) |
Ursula Braun | cd6851f | 2017-01-09 16:55:18 +0100 | [diff] [blame] | 1342 | { |
Ursula Braun | 3e03472 | 2017-07-28 13:56:20 +0200 | [diff] [blame] | 1343 | struct smc_buf_desc *buf_desc, *bf_desc; |
| 1344 | struct list_head *buf_list; |
Ursula Braun | cd6851f | 2017-01-09 16:55:18 +0100 | [diff] [blame] | 1345 | int i; |
| 1346 | |
| 1347 | for (i = 0; i < SMC_RMBE_SIZES; i++) { |
Ursula Braun | 3e03472 | 2017-07-28 13:56:20 +0200 | [diff] [blame] | 1348 | if (is_rmb) |
| 1349 | buf_list = &lgr->rmbs[i]; |
| 1350 | else |
| 1351 | buf_list = &lgr->sndbufs[i]; |
| 1352 | list_for_each_entry_safe(buf_desc, bf_desc, buf_list, |
Ursula Braun | cd6851f | 2017-01-09 16:55:18 +0100 | [diff] [blame] | 1353 | list) { |
Ursula Braun | 3e03472 | 2017-07-28 13:56:20 +0200 | [diff] [blame] | 1354 | list_del(&buf_desc->list); |
Hans Wippel | 6511aad | 2018-05-18 09:34:17 +0200 | [diff] [blame] | 1355 | smc_buf_free(lgr, is_rmb, buf_desc); |
Ursula Braun | cd6851f | 2017-01-09 16:55:18 +0100 | [diff] [blame] | 1356 | } |
| 1357 | } |
| 1358 | } |
| 1359 | |
Ursula Braun | 3e03472 | 2017-07-28 13:56:20 +0200 | [diff] [blame] | 1360 | static void smc_lgr_free_bufs(struct smc_link_group *lgr) |
Ursula Braun | cd6851f | 2017-01-09 16:55:18 +0100 | [diff] [blame] | 1361 | { |
Ursula Braun | 3e03472 | 2017-07-28 13:56:20 +0200 | [diff] [blame] | 1362 | /* free send buffers */ |
| 1363 | __smc_lgr_free_bufs(lgr, false); |
| 1364 | /* free rmbs */ |
| 1365 | __smc_lgr_free_bufs(lgr, true); |
Ursula Braun | cd6851f | 2017-01-09 16:55:18 +0100 | [diff] [blame] | 1366 | } |
| 1367 | |
Wen Gu | 61f434b | 2022-01-13 16:36:40 +0800 | [diff] [blame] | 1368 | /* won't be freed until no one accesses to lgr anymore */ |
| 1369 | static void __smc_lgr_free(struct smc_link_group *lgr) |
| 1370 | { |
| 1371 | smc_lgr_free_bufs(lgr); |
| 1372 | if (lgr->is_smcd) { |
| 1373 | if (!atomic_dec_return(&lgr->smcd->lgr_cnt)) |
| 1374 | wake_up(&lgr->smcd->lgrs_deleted); |
| 1375 | } else { |
| 1376 | smc_wr_free_lgr_mem(lgr); |
| 1377 | if (!atomic_dec_return(&lgr_cnt)) |
| 1378 | wake_up(&lgrs_deleted); |
| 1379 | } |
| 1380 | kfree(lgr); |
| 1381 | } |
| 1382 | |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 1383 | /* remove a link group */ |
Ursula Braun | 3f3f0e3 | 2018-11-22 10:26:35 +0100 | [diff] [blame] | 1384 | static void smc_lgr_free(struct smc_link_group *lgr) |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 1385 | { |
Karsten Graul | b924754 | 2020-04-29 17:10:41 +0200 | [diff] [blame] | 1386 | int i; |
| 1387 | |
Karsten Graul | a52bcc9 | 2020-05-04 14:18:46 +0200 | [diff] [blame] | 1388 | if (!lgr->is_smcd) { |
D. Wythe | b5dd4d6 | 2023-02-02 16:26:39 +0800 | [diff] [blame] | 1389 | down_write(&lgr->llc_conf_mutex); |
Karsten Graul | a52bcc9 | 2020-05-04 14:18:46 +0200 | [diff] [blame] | 1390 | for (i = 0; i < SMC_LINKS_PER_LGR_MAX; i++) { |
| 1391 | if (lgr->lnk[i].state != SMC_LNK_UNUSED) |
Karsten Graul | 0a99be4 | 2020-05-05 15:01:20 +0200 | [diff] [blame] | 1392 | smcr_link_clear(&lgr->lnk[i], false); |
Karsten Graul | a52bcc9 | 2020-05-04 14:18:46 +0200 | [diff] [blame] | 1393 | } |
D. Wythe | b5dd4d6 | 2023-02-02 16:26:39 +0800 | [diff] [blame] | 1394 | up_write(&lgr->llc_conf_mutex); |
Karsten Graul | a52bcc9 | 2020-05-04 14:18:46 +0200 | [diff] [blame] | 1395 | smc_llc_lgr_clear(lgr); |
| 1396 | } |
| 1397 | |
Karsten Graul | 22ef473 | 2020-09-10 18:48:29 +0200 | [diff] [blame] | 1398 | destroy_workqueue(lgr->tx_wq); |
Ursula Braun | b3cb53c | 2019-10-09 10:07:45 +0200 | [diff] [blame] | 1399 | if (lgr->is_smcd) { |
Ursula Braun | f9aab6f | 2020-09-10 18:48:26 +0200 | [diff] [blame] | 1400 | smc_ism_put_vlan(lgr->smcd, lgr->vlan_id); |
Stefan Raspl | 8c81ba2 | 2023-01-23 19:17:52 +0100 | [diff] [blame] | 1401 | put_device(lgr->smcd->ops->get_dev(lgr->smcd)); |
Ursula Braun | b3cb53c | 2019-10-09 10:07:45 +0200 | [diff] [blame] | 1402 | } |
Wen Gu | 61f434b | 2022-01-13 16:36:40 +0800 | [diff] [blame] | 1403 | smc_lgr_put(lgr); /* theoretically last lgr_put */ |
| 1404 | } |
| 1405 | |
/* take a reference on the link group; paired with smc_lgr_put() */
void smc_lgr_hold(struct smc_link_group *lgr)
{
	refcount_inc(&lgr->refcnt);
}
| 1410 | |
/* drop a link group reference; the last put frees the lgr via
 * __smc_lgr_free()
 */
void smc_lgr_put(struct smc_link_group *lgr)
{
	if (refcount_dec_and_test(&lgr->refcnt))
		__smc_lgr_free(lgr);
}
| 1416 | |
Ursula Braun | 2a0674ff | 2019-10-21 16:13:13 +0200 | [diff] [blame] | 1417 | static void smc_sk_wake_ups(struct smc_sock *smc) |
| 1418 | { |
| 1419 | smc->sk.sk_write_space(&smc->sk); |
| 1420 | smc->sk.sk_data_ready(&smc->sk); |
| 1421 | smc->sk.sk_state_change(&smc->sk); |
| 1422 | } |
| 1423 | |
/* kill a connection.
 * @soft: true allows waiting for the rx tasklet to finish regularly;
 *        false only waits for a currently running tasklet to unlock
 * Marks the connection aborted, wakes the socket, unregisters the
 * connection from its link group and aborts the socket close state.
 */
static void smc_conn_kill(struct smc_connection *conn, bool soft)
{
	struct smc_sock *smc = container_of(conn, struct smc_sock, conn);

	/* if the peer already shut down, just flag the abort locally
	 * instead of sending a close-abort to it
	 */
	if (conn->lgr->is_smcd && conn->lgr->peer_shutdown)
		conn->local_tx_ctrl.conn_state_flags.peer_conn_abort = 1;
	else
		smc_close_abort(conn);
	conn->killed = 1;
	smc->sk.sk_err = ECONNABORTED;
	smc_sk_wake_ups(smc);
	if (conn->lgr->is_smcd) {
		smc_ism_unset_conn(conn);
		if (soft)
			tasklet_kill(&conn->rx_tsklet);
		else
			tasklet_unlock_wait(&conn->rx_tsklet);
	} else {
		smc_cdc_wait_pend_tx_wr(conn);
	}
	smc_lgr_unregister_conn(conn);
	smc_close_active_abort(smc);
}
| 1448 | |
Ursula Braun | 42bfba9 | 2019-11-14 13:02:41 +0100 | [diff] [blame] | 1449 | static void smc_lgr_cleanup(struct smc_link_group *lgr) |
| 1450 | { |
| 1451 | if (lgr->is_smcd) { |
| 1452 | smc_ism_signal_shutdown(lgr); |
Ursula Braun | 42bfba9 | 2019-11-14 13:02:41 +0100 | [diff] [blame] | 1453 | } else { |
Karsten Graul | 3e0c40a | 2020-05-04 14:18:45 +0200 | [diff] [blame] | 1454 | u32 rsn = lgr->llc_termination_rsn; |
| 1455 | |
| 1456 | if (!rsn) |
| 1457 | rsn = SMC_LLC_DEL_PROG_INIT_TERM; |
| 1458 | smc_llc_send_link_delete_all(lgr, false, rsn); |
Karsten Graul | a52bcc9 | 2020-05-04 14:18:46 +0200 | [diff] [blame] | 1459 | smcr_lgr_link_deactivate_all(lgr); |
Ursula Braun | 42bfba9 | 2019-11-14 13:02:41 +0100 | [diff] [blame] | 1460 | } |
| 1461 | } |
| 1462 | |
Karsten Graul | ba95206 | 2020-02-17 16:24:53 +0100 | [diff] [blame] | 1463 | /* terminate link group |
| 1464 | * @soft: true if link group shutdown can take its time |
| 1465 | * false if immediate link group shutdown is required |
| 1466 | */ |
Ursula Braun | 5421ec2 | 2019-11-14 13:02:42 +0100 | [diff] [blame] | 1467 | static void __smc_lgr_terminate(struct smc_link_group *lgr, bool soft) |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 1468 | { |
| 1469 | struct smc_connection *conn; |
Ursula Braun | b38d732 | 2017-01-09 16:55:25 +0100 | [diff] [blame] | 1470 | struct smc_sock *smc; |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 1471 | struct rb_node *node; |
| 1472 | |
Karsten Graul | 517c300 | 2018-05-15 17:05:03 +0200 | [diff] [blame] | 1473 | if (lgr->terminating) |
| 1474 | return; /* lgr already terminating */ |
Karsten Graul | a52bcc9 | 2020-05-04 14:18:46 +0200 | [diff] [blame] | 1475 | /* cancel free_work sync, will terminate when lgr->freeing is set */ |
Wenjia Zhang | 13085e1 | 2023-03-13 11:08:28 +0100 | [diff] [blame] | 1476 | cancel_delayed_work(&lgr->free_work); |
Karsten Graul | 517c300 | 2018-05-15 17:05:03 +0200 | [diff] [blame] | 1477 | lgr->terminating = 1; |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 1478 | |
Ursula Braun | 69318b5 | 2019-10-21 16:13:10 +0200 | [diff] [blame] | 1479 | /* kill remaining link group connections */ |
| 1480 | read_lock_bh(&lgr->conns_lock); |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 1481 | node = rb_first(&lgr->conns_all); |
| 1482 | while (node) { |
Ursula Braun | 69318b5 | 2019-10-21 16:13:10 +0200 | [diff] [blame] | 1483 | read_unlock_bh(&lgr->conns_lock); |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 1484 | conn = rb_entry(node, struct smc_connection, alert_node); |
Ursula Braun | b38d732 | 2017-01-09 16:55:25 +0100 | [diff] [blame] | 1485 | smc = container_of(conn, struct smc_sock, conn); |
Ursula Braun | 81cf4f4 | 2019-10-21 16:13:15 +0200 | [diff] [blame] | 1486 | sock_hold(&smc->sk); /* sock_put below */ |
Ursula Braun | 69318b5 | 2019-10-21 16:13:10 +0200 | [diff] [blame] | 1487 | lock_sock(&smc->sk); |
Ursula Braun | 5421ec2 | 2019-11-14 13:02:42 +0100 | [diff] [blame] | 1488 | smc_conn_kill(conn, soft); |
Ursula Braun | 69318b5 | 2019-10-21 16:13:10 +0200 | [diff] [blame] | 1489 | release_sock(&smc->sk); |
Ursula Braun | 81cf4f4 | 2019-10-21 16:13:15 +0200 | [diff] [blame] | 1490 | sock_put(&smc->sk); /* sock_hold above */ |
Ursula Braun | 69318b5 | 2019-10-21 16:13:10 +0200 | [diff] [blame] | 1491 | read_lock_bh(&lgr->conns_lock); |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 1492 | node = rb_first(&lgr->conns_all); |
| 1493 | } |
Ursula Braun | 69318b5 | 2019-10-21 16:13:10 +0200 | [diff] [blame] | 1494 | read_unlock_bh(&lgr->conns_lock); |
Ursula Braun | 42bfba9 | 2019-11-14 13:02:41 +0100 | [diff] [blame] | 1495 | smc_lgr_cleanup(lgr); |
Karsten Graul | a52bcc9 | 2020-05-04 14:18:46 +0200 | [diff] [blame] | 1496 | smc_lgr_free(lgr); |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 1497 | } |
| 1498 | |
Karsten Graul | 5f78fe9 | 2020-02-17 16:24:54 +0100 | [diff] [blame] | 1499 | /* unlink link group and schedule termination */ |
| 1500 | void smc_lgr_terminate_sched(struct smc_link_group *lgr) |
Hans Wippel | b9f227c | 2018-05-23 16:38:10 +0200 | [diff] [blame] | 1501 | { |
Ursula Braun | a0a62ee | 2019-10-09 10:07:44 +0200 | [diff] [blame] | 1502 | spinlock_t *lgr_lock; |
| 1503 | |
| 1504 | smc_lgr_list_head(lgr, &lgr_lock); |
| 1505 | spin_lock_bh(lgr_lock); |
Karsten Graul | 3739707 | 2020-02-17 16:24:52 +0100 | [diff] [blame] | 1506 | if (list_empty(&lgr->list) || lgr->terminating || lgr->freeing) { |
Ursula Braun | 8caa654 | 2019-10-21 16:13:09 +0200 | [diff] [blame] | 1507 | spin_unlock_bh(lgr_lock); |
| 1508 | return; /* lgr already terminating */ |
| 1509 | } |
| 1510 | list_del_init(&lgr->list); |
Karsten Graul | a52bcc9 | 2020-05-04 14:18:46 +0200 | [diff] [blame] | 1511 | lgr->freeing = 1; |
Ursula Braun | a0a62ee | 2019-10-09 10:07:44 +0200 | [diff] [blame] | 1512 | spin_unlock_bh(lgr_lock); |
Karsten Graul | 5f78fe9 | 2020-02-17 16:24:54 +0100 | [diff] [blame] | 1513 | schedule_work(&lgr->terminate_work); |
Hans Wippel | b9f227c | 2018-05-23 16:38:10 +0200 | [diff] [blame] | 1514 | } |
| 1515 | |
Ursula Braun | 5421ec2 | 2019-11-14 13:02:42 +0100 | [diff] [blame] | 1516 | /* Called when peer lgr shutdown (regularly or abnormally) is received */ |
Hans Wippel | 0512f69 | 2018-11-20 16:46:41 +0100 | [diff] [blame] | 1517 | void smc_smcd_terminate(struct smcd_dev *dev, u64 peer_gid, unsigned short vlan) |
Hans Wippel | c6ba7c9 | 2018-06-28 19:05:07 +0200 | [diff] [blame] | 1518 | { |
| 1519 | struct smc_link_group *lgr, *l; |
| 1520 | LIST_HEAD(lgr_free_list); |
| 1521 | |
| 1522 | /* run common cleanup function and build free list */ |
Ursula Braun | a0a62ee | 2019-10-09 10:07:44 +0200 | [diff] [blame] | 1523 | spin_lock_bh(&dev->lgr_lock); |
Ursula Braun | a2351c5 | 2019-10-09 10:07:43 +0200 | [diff] [blame] | 1524 | list_for_each_entry_safe(lgr, l, &dev->lgr_list, list) { |
| 1525 | if ((!peer_gid || lgr->peer_gid == peer_gid) && |
Hans Wippel | 0512f69 | 2018-11-20 16:46:41 +0100 | [diff] [blame] | 1526 | (vlan == VLAN_VID_MASK || lgr->vlan_id == vlan)) { |
Ursula Braun | 50c6b20 | 2019-11-14 13:02:40 +0100 | [diff] [blame] | 1527 | if (peer_gid) /* peer triggered termination */ |
| 1528 | lgr->peer_shutdown = 1; |
Hans Wippel | c6ba7c9 | 2018-06-28 19:05:07 +0200 | [diff] [blame] | 1529 | list_move(&lgr->list, &lgr_free_list); |
Karsten Graul | a52bcc9 | 2020-05-04 14:18:46 +0200 | [diff] [blame] | 1530 | lgr->freeing = 1; |
Hans Wippel | c6ba7c9 | 2018-06-28 19:05:07 +0200 | [diff] [blame] | 1531 | } |
| 1532 | } |
Ursula Braun | a0a62ee | 2019-10-09 10:07:44 +0200 | [diff] [blame] | 1533 | spin_unlock_bh(&dev->lgr_lock); |
Hans Wippel | c6ba7c9 | 2018-06-28 19:05:07 +0200 | [diff] [blame] | 1534 | |
| 1535 | /* cancel the regular free workers and actually free lgrs */ |
| 1536 | list_for_each_entry_safe(lgr, l, &lgr_free_list, list) { |
| 1537 | list_del_init(&lgr->list); |
Ursula Braun | 50c6b20 | 2019-11-14 13:02:40 +0100 | [diff] [blame] | 1538 | schedule_work(&lgr->terminate_work); |
Hans Wippel | c6ba7c9 | 2018-06-28 19:05:07 +0200 | [diff] [blame] | 1539 | } |
| 1540 | } |
| 1541 | |
Ursula Braun | 5421ec2 | 2019-11-14 13:02:42 +0100 | [diff] [blame] | 1542 | /* Called when an SMCD device is removed or the smc module is unloaded */ |
| 1543 | void smc_smcd_terminate_all(struct smcd_dev *smcd) |
| 1544 | { |
| 1545 | struct smc_link_group *lgr, *lg; |
| 1546 | LIST_HEAD(lgr_free_list); |
| 1547 | |
| 1548 | spin_lock_bh(&smcd->lgr_lock); |
| 1549 | list_splice_init(&smcd->lgr_list, &lgr_free_list); |
| 1550 | list_for_each_entry(lgr, &lgr_free_list, list) |
| 1551 | lgr->freeing = 1; |
| 1552 | spin_unlock_bh(&smcd->lgr_lock); |
| 1553 | |
| 1554 | list_for_each_entry_safe(lgr, lg, &lgr_free_list, list) { |
| 1555 | list_del_init(&lgr->list); |
| 1556 | __smc_lgr_terminate(lgr, false); |
| 1557 | } |
Ursula Braun | 5edd6b9 | 2019-11-14 13:02:43 +0100 | [diff] [blame] | 1558 | |
| 1559 | if (atomic_read(&smcd->lgr_cnt)) |
| 1560 | wait_event(smcd->lgrs_deleted, !atomic_read(&smcd->lgr_cnt)); |
Ursula Braun | 5421ec2 | 2019-11-14 13:02:42 +0100 | [diff] [blame] | 1561 | } |
| 1562 | |
Ursula Braun | 0b29ec64 | 2019-11-14 13:02:47 +0100 | [diff] [blame] | 1563 | /* Called when an SMCR device is removed or the smc module is unloaded. |
| 1564 | * If smcibdev is given, all SMCR link groups using this device are terminated. |
| 1565 | * If smcibdev is NULL, all SMCR link groups are terminated. |
| 1566 | */ |
| 1567 | void smc_smcr_terminate_all(struct smc_ib_device *smcibdev) |
| 1568 | { |
| 1569 | struct smc_link_group *lgr, *lg; |
| 1570 | LIST_HEAD(lgr_free_list); |
Karsten Graul | b924754 | 2020-04-29 17:10:41 +0200 | [diff] [blame] | 1571 | int i; |
Ursula Braun | 0b29ec64 | 2019-11-14 13:02:47 +0100 | [diff] [blame] | 1572 | |
| 1573 | spin_lock_bh(&smc_lgr_list.lock); |
| 1574 | if (!smcibdev) { |
| 1575 | list_splice_init(&smc_lgr_list.list, &lgr_free_list); |
| 1576 | list_for_each_entry(lgr, &lgr_free_list, list) |
| 1577 | lgr->freeing = 1; |
| 1578 | } else { |
| 1579 | list_for_each_entry_safe(lgr, lg, &smc_lgr_list.list, list) { |
Karsten Graul | b924754 | 2020-04-29 17:10:41 +0200 | [diff] [blame] | 1580 | for (i = 0; i < SMC_LINKS_PER_LGR_MAX; i++) { |
Karsten Graul | 8752393 | 2020-05-01 12:48:09 +0200 | [diff] [blame] | 1581 | if (lgr->lnk[i].smcibdev == smcibdev) |
Wen Gu | 56d99e8 | 2022-01-16 15:43:42 +0800 | [diff] [blame] | 1582 | smcr_link_down_cond_sched(&lgr->lnk[i]); |
Ursula Braun | 0b29ec64 | 2019-11-14 13:02:47 +0100 | [diff] [blame] | 1583 | } |
| 1584 | } |
| 1585 | } |
| 1586 | spin_unlock_bh(&smc_lgr_list.lock); |
| 1587 | |
| 1588 | list_for_each_entry_safe(lgr, lg, &lgr_free_list, list) { |
| 1589 | list_del_init(&lgr->list); |
Karsten Graul | 3e0c40a | 2020-05-04 14:18:45 +0200 | [diff] [blame] | 1590 | smc_llc_set_termination_rsn(lgr, SMC_LLC_DEL_OP_INIT_TERM); |
Ursula Braun | 0b29ec64 | 2019-11-14 13:02:47 +0100 | [diff] [blame] | 1591 | __smc_lgr_terminate(lgr, false); |
| 1592 | } |
Ursula Braun | 6dabd40 | 2019-11-16 17:47:29 +0100 | [diff] [blame] | 1593 | |
| 1594 | if (smcibdev) { |
| 1595 | if (atomic_read(&smcibdev->lnk_cnt)) |
| 1596 | wait_event(smcibdev->lnks_deleted, |
| 1597 | !atomic_read(&smcibdev->lnk_cnt)); |
| 1598 | } else { |
| 1599 | if (atomic_read(&lgr_cnt)) |
| 1600 | wait_event(lgrs_deleted, !atomic_read(&lgr_cnt)); |
| 1601 | } |
Ursula Braun | 0b29ec64 | 2019-11-14 13:02:47 +0100 | [diff] [blame] | 1602 | } |
| 1603 | |
Karsten Graul | ad6c111b | 2020-05-04 14:18:44 +0200 | [diff] [blame] | 1604 | /* set new lgr type and clear all asymmetric link tagging */ |
| 1605 | void smcr_lgr_set_type(struct smc_link_group *lgr, enum smc_lgr_type new_type) |
| 1606 | { |
Karsten Graul | 0a99be4 | 2020-05-05 15:01:20 +0200 | [diff] [blame] | 1607 | char *lgr_type = ""; |
Karsten Graul | ad6c111b | 2020-05-04 14:18:44 +0200 | [diff] [blame] | 1608 | int i; |
| 1609 | |
| 1610 | for (i = 0; i < SMC_LINKS_PER_LGR_MAX; i++) |
| 1611 | if (smc_link_usable(&lgr->lnk[i])) |
| 1612 | lgr->lnk[i].link_is_asym = false; |
Karsten Graul | 0a99be4 | 2020-05-05 15:01:20 +0200 | [diff] [blame] | 1613 | if (lgr->type == new_type) |
| 1614 | return; |
Karsten Graul | ad6c111b | 2020-05-04 14:18:44 +0200 | [diff] [blame] | 1615 | lgr->type = new_type; |
Karsten Graul | 0a99be4 | 2020-05-05 15:01:20 +0200 | [diff] [blame] | 1616 | |
| 1617 | switch (lgr->type) { |
| 1618 | case SMC_LGR_NONE: |
| 1619 | lgr_type = "NONE"; |
| 1620 | break; |
| 1621 | case SMC_LGR_SINGLE: |
| 1622 | lgr_type = "SINGLE"; |
| 1623 | break; |
| 1624 | case SMC_LGR_SYMMETRIC: |
| 1625 | lgr_type = "SYMMETRIC"; |
| 1626 | break; |
| 1627 | case SMC_LGR_ASYMMETRIC_PEER: |
| 1628 | lgr_type = "ASYMMETRIC_PEER"; |
| 1629 | break; |
| 1630 | case SMC_LGR_ASYMMETRIC_LOCAL: |
| 1631 | lgr_type = "ASYMMETRIC_LOCAL"; |
| 1632 | break; |
| 1633 | } |
Tony Lu | de2fea7 | 2021-12-28 21:06:11 +0800 | [diff] [blame] | 1634 | pr_warn_ratelimited("smc: SMC-R lg %*phN net %llu state changed: " |
Karsten Graul | 0a99be4 | 2020-05-05 15:01:20 +0200 | [diff] [blame] | 1635 | "%s, pnetid %.16s\n", SMC_LGR_ID_SIZE, &lgr->id, |
Tony Lu | de2fea7 | 2021-12-28 21:06:11 +0800 | [diff] [blame] | 1636 | lgr->net->net_cookie, lgr_type, lgr->pnet_id); |
Karsten Graul | ad6c111b | 2020-05-04 14:18:44 +0200 | [diff] [blame] | 1637 | } |
| 1638 | |
/* set new lgr type and tag a link as asymmetric.
 * smcr_lgr_set_type() first clears all asym flags, then the single
 * link at @asym_lnk_idx is re-tagged.
 */
void smcr_lgr_set_type_asym(struct smc_link_group *lgr,
			    enum smc_lgr_type new_type, int asym_lnk_idx)
{
	smcr_lgr_set_type(lgr, new_type);
	lgr->lnk[asym_lnk_idx].link_is_asym = true;
}
| 1646 | |
Karsten Graul | b286a06 | 2020-05-04 14:18:40 +0200 | [diff] [blame] | 1647 | /* abort connection, abort_work scheduled from tasklet context */ |
| 1648 | static void smc_conn_abort_work(struct work_struct *work) |
| 1649 | { |
| 1650 | struct smc_connection *conn = container_of(work, |
| 1651 | struct smc_connection, |
| 1652 | abort_work); |
| 1653 | struct smc_sock *smc = container_of(conn, struct smc_sock, conn); |
| 1654 | |
Karsten Graul | a18cee4 | 2021-09-20 21:18:15 +0200 | [diff] [blame] | 1655 | lock_sock(&smc->sk); |
Karsten Graul | b286a06 | 2020-05-04 14:18:40 +0200 | [diff] [blame] | 1656 | smc_conn_kill(conn, true); |
Karsten Graul | a18cee4 | 2021-09-20 21:18:15 +0200 | [diff] [blame] | 1657 | release_sock(&smc->sk); |
Karsten Graul | b286a06 | 2020-05-04 14:18:40 +0200 | [diff] [blame] | 1658 | sock_put(&smc->sk); /* sock_hold done by schedulers of abort_work */ |
| 1659 | } |
| 1660 | |
Karsten Graul | 1f90a05 | 2020-05-01 12:48:07 +0200 | [diff] [blame] | 1661 | void smcr_port_add(struct smc_ib_device *smcibdev, u8 ibport) |
| 1662 | { |
Karsten Graul | 1f90a05 | 2020-05-01 12:48:07 +0200 | [diff] [blame] | 1663 | struct smc_link_group *lgr, *n; |
| 1664 | |
Guangguan Wang | f5146e3 | 2023-09-08 11:31:43 +0800 | [diff] [blame] | 1665 | spin_lock_bh(&smc_lgr_list.lock); |
Karsten Graul | 1f90a05 | 2020-05-01 12:48:07 +0200 | [diff] [blame] | 1666 | list_for_each_entry_safe(lgr, n, &smc_lgr_list.list, list) { |
Karsten Graul | c48254f | 2020-07-18 15:06:14 +0200 | [diff] [blame] | 1667 | struct smc_link *link; |
| 1668 | |
Karsten Graul | 1f90a05 | 2020-05-01 12:48:07 +0200 | [diff] [blame] | 1669 | if (strncmp(smcibdev->pnetid[ibport - 1], lgr->pnet_id, |
| 1670 | SMC_MAX_PNETID_LEN) || |
| 1671 | lgr->type == SMC_LGR_SYMMETRIC || |
Tony Lu | 0237a3a | 2021-12-28 21:06:09 +0800 | [diff] [blame] | 1672 | lgr->type == SMC_LGR_ASYMMETRIC_PEER || |
| 1673 | !rdma_dev_access_netns(smcibdev->ibdev, lgr->net)) |
Karsten Graul | 1f90a05 | 2020-05-01 12:48:07 +0200 | [diff] [blame] | 1674 | continue; |
Karsten Graul | c48254f | 2020-07-18 15:06:14 +0200 | [diff] [blame] | 1675 | |
Guangguan Wang | 69b888e | 2023-08-17 21:20:31 +0800 | [diff] [blame] | 1676 | if (lgr->type == SMC_LGR_SINGLE && lgr->max_links <= 1) |
| 1677 | continue; |
| 1678 | |
Karsten Graul | c48254f | 2020-07-18 15:06:14 +0200 | [diff] [blame] | 1679 | /* trigger local add link processing */ |
| 1680 | link = smc_llc_usable_link(lgr); |
| 1681 | if (link) |
| 1682 | smc_llc_add_link_local(link); |
Karsten Graul | 1f90a05 | 2020-05-01 12:48:07 +0200 | [diff] [blame] | 1683 | } |
Guangguan Wang | f5146e3 | 2023-09-08 11:31:43 +0800 | [diff] [blame] | 1684 | spin_unlock_bh(&smc_lgr_list.lock); |
Karsten Graul | 1f90a05 | 2020-05-01 12:48:07 +0200 | [diff] [blame] | 1685 | } |
| 1686 | |
Karsten Graul | 541afa1 | 2020-05-01 12:48:08 +0200 | [diff] [blame] | 1687 | /* link is down - switch connections to alternate link, |
| 1688 | * must be called under lgr->llc_conf_mutex lock |
| 1689 | */ |
| 1690 | static void smcr_link_down(struct smc_link *lnk) |
| 1691 | { |
| 1692 | struct smc_link_group *lgr = lnk->lgr; |
| 1693 | struct smc_link *to_lnk; |
| 1694 | int del_link_id; |
| 1695 | |
| 1696 | if (!lgr || lnk->state == SMC_LNK_UNUSED || list_empty(&lgr->list)) |
| 1697 | return; |
| 1698 | |
Karsten Graul | c6f02eb | 2020-05-04 14:18:38 +0200 | [diff] [blame] | 1699 | to_lnk = smc_switch_conns(lgr, lnk, true); |
Karsten Graul | 541afa1 | 2020-05-01 12:48:08 +0200 | [diff] [blame] | 1700 | if (!to_lnk) { /* no backup link available */ |
Karsten Graul | 0a99be4 | 2020-05-05 15:01:20 +0200 | [diff] [blame] | 1701 | smcr_link_clear(lnk, true); |
Karsten Graul | 541afa1 | 2020-05-01 12:48:08 +0200 | [diff] [blame] | 1702 | return; |
| 1703 | } |
Karsten Graul | ad6c111b | 2020-05-04 14:18:44 +0200 | [diff] [blame] | 1704 | smcr_lgr_set_type(lgr, SMC_LGR_SINGLE); |
Karsten Graul | 541afa1 | 2020-05-01 12:48:08 +0200 | [diff] [blame] | 1705 | del_link_id = lnk->link_id; |
| 1706 | |
| 1707 | if (lgr->role == SMC_SERV) { |
| 1708 | /* trigger local delete link processing */ |
Karsten Graul | 4dadd15 | 2020-05-03 14:38:50 +0200 | [diff] [blame] | 1709 | smc_llc_srv_delete_link_local(to_lnk, del_link_id); |
Karsten Graul | 541afa1 | 2020-05-01 12:48:08 +0200 | [diff] [blame] | 1710 | } else { |
| 1711 | if (lgr->llc_flow_lcl.type != SMC_LLC_FLOW_NONE) { |
| 1712 | /* another llc task is ongoing */ |
D. Wythe | b5dd4d6 | 2023-02-02 16:26:39 +0800 | [diff] [blame] | 1713 | up_write(&lgr->llc_conf_mutex); |
Karsten Graul | 6778a6b | 2020-07-08 17:05:11 +0200 | [diff] [blame] | 1714 | wait_event_timeout(lgr->llc_flow_waiter, |
| 1715 | (list_empty(&lgr->list) || |
| 1716 | lgr->llc_flow_lcl.type == SMC_LLC_FLOW_NONE), |
Karsten Graul | 541afa1 | 2020-05-01 12:48:08 +0200 | [diff] [blame] | 1717 | SMC_LLC_WAIT_TIME); |
D. Wythe | b5dd4d6 | 2023-02-02 16:26:39 +0800 | [diff] [blame] | 1718 | down_write(&lgr->llc_conf_mutex); |
Karsten Graul | 541afa1 | 2020-05-01 12:48:08 +0200 | [diff] [blame] | 1719 | } |
Karsten Graul | 68fd894 | 2020-07-18 15:06:10 +0200 | [diff] [blame] | 1720 | if (!list_empty(&lgr->list)) { |
Karsten Graul | 6778a6b | 2020-07-08 17:05:11 +0200 | [diff] [blame] | 1721 | smc_llc_send_delete_link(to_lnk, del_link_id, |
| 1722 | SMC_LLC_REQ, true, |
| 1723 | SMC_LLC_DEL_LOST_PATH); |
Karsten Graul | 68fd894 | 2020-07-18 15:06:10 +0200 | [diff] [blame] | 1724 | smcr_link_clear(lnk, true); |
| 1725 | } |
Karsten Graul | 6778a6b | 2020-07-08 17:05:11 +0200 | [diff] [blame] | 1726 | wake_up(&lgr->llc_flow_waiter); /* wake up next waiter */ |
Karsten Graul | 541afa1 | 2020-05-01 12:48:08 +0200 | [diff] [blame] | 1727 | } |
| 1728 | } |
| 1729 | |
| 1730 | /* must be called under lgr->llc_conf_mutex lock */ |
| 1731 | void smcr_link_down_cond(struct smc_link *lnk) |
| 1732 | { |
Tony Lu | a3a0e81 | 2021-11-01 15:39:16 +0800 | [diff] [blame] | 1733 | if (smc_link_downing(&lnk->state)) { |
| 1734 | trace_smcr_link_down(lnk, __builtin_return_address(0)); |
Karsten Graul | 541afa1 | 2020-05-01 12:48:08 +0200 | [diff] [blame] | 1735 | smcr_link_down(lnk); |
Tony Lu | a3a0e81 | 2021-11-01 15:39:16 +0800 | [diff] [blame] | 1736 | } |
Karsten Graul | 541afa1 | 2020-05-01 12:48:08 +0200 | [diff] [blame] | 1737 | } |
| 1738 | |
| 1739 | /* will get the lgr->llc_conf_mutex lock */ |
| 1740 | void smcr_link_down_cond_sched(struct smc_link *lnk) |
| 1741 | { |
Tony Lu | a3a0e81 | 2021-11-01 15:39:16 +0800 | [diff] [blame] | 1742 | if (smc_link_downing(&lnk->state)) { |
| 1743 | trace_smcr_link_down(lnk, __builtin_return_address(0)); |
Karsten Graul | 541afa1 | 2020-05-01 12:48:08 +0200 | [diff] [blame] | 1744 | schedule_work(&lnk->link_down_wrk); |
Tony Lu | a3a0e81 | 2021-11-01 15:39:16 +0800 | [diff] [blame] | 1745 | } |
Karsten Graul | 541afa1 | 2020-05-01 12:48:08 +0200 | [diff] [blame] | 1746 | } |
| 1747 | |
/* an IB port of a pnetid-matching device failed: schedule link-down
 * processing for every usable link still running over that port.
 * NOTE(review): smc_lgr_list is walked here without smc_lgr_list.lock,
 * unlike smcr_port_add() - presumably safe in the caller's context;
 * confirm.
 */
void smcr_port_err(struct smc_ib_device *smcibdev, u8 ibport)
{
	struct smc_link_group *lgr, *n;
	int i;

	list_for_each_entry_safe(lgr, n, &smc_lgr_list.list, list) {
		if (strncmp(smcibdev->pnetid[ibport - 1], lgr->pnet_id,
			    SMC_MAX_PNETID_LEN))
			continue; /* lgr is not affected */
		if (list_empty(&lgr->list))
			continue;
		for (i = 0; i < SMC_LINKS_PER_LGR_MAX; i++) {
			struct smc_link *lnk = &lgr->lnk[i];

			/* only links on the failed device/port go down */
			if (smc_link_usable(lnk) &&
			    lnk->smcibdev == smcibdev && lnk->ibport == ibport)
				smcr_link_down_cond_sched(lnk);
		}
	}
}
| 1768 | |
Karsten Graul | 541afa1 | 2020-05-01 12:48:08 +0200 | [diff] [blame] | 1769 | static void smc_link_down_work(struct work_struct *work) |
| 1770 | { |
| 1771 | struct smc_link *link = container_of(work, struct smc_link, |
| 1772 | link_down_wrk); |
| 1773 | struct smc_link_group *lgr = link->lgr; |
| 1774 | |
| 1775 | if (list_empty(&lgr->list)) |
| 1776 | return; |
Karsten Graul | 6778a6b | 2020-07-08 17:05:11 +0200 | [diff] [blame] | 1777 | wake_up_all(&lgr->llc_msg_waiter); |
D. Wythe | b5dd4d6 | 2023-02-02 16:26:39 +0800 | [diff] [blame] | 1778 | down_write(&lgr->llc_conf_mutex); |
Karsten Graul | 541afa1 | 2020-05-01 12:48:08 +0200 | [diff] [blame] | 1779 | smcr_link_down(link); |
D. Wythe | b5dd4d6 | 2023-02-02 16:26:39 +0800 | [diff] [blame] | 1780 | up_write(&lgr->llc_conf_mutex); |
Karsten Graul | 541afa1 | 2020-05-01 12:48:08 +0200 | [diff] [blame] | 1781 | } |
| 1782 | |
Karsten Graul | 587acad | 2021-11-24 13:32:37 +0100 | [diff] [blame] | 1783 | static int smc_vlan_by_tcpsk_walk(struct net_device *lower_dev, |
| 1784 | struct netdev_nested_priv *priv) |
| 1785 | { |
| 1786 | unsigned short *vlan_id = (unsigned short *)priv->data; |
| 1787 | |
| 1788 | if (is_vlan_dev(lower_dev)) { |
| 1789 | *vlan_id = vlan_dev_vlan_id(lower_dev); |
| 1790 | return 1; |
| 1791 | } |
| 1792 | |
| 1793 | return 0; |
| 1794 | } |
| 1795 | |
/* Determine vlan of internal TCP socket.
 * Sets ini->vlan_id to the vlan id of the egress device (0 if none).
 * Returns 0 on success, -ENOTCONN if the socket has no dst entry, or
 * -ENODEV if the dst entry has no device.
 */
int smc_vlan_by_tcpsk(struct socket *clcsock, struct smc_init_info *ini)
{
	struct dst_entry *dst = sk_dst_get(clcsock->sk);
	struct netdev_nested_priv priv;
	struct net_device *ndev;
	int rc = 0;

	ini->vlan_id = 0;
	if (!dst) {
		rc = -ENOTCONN;
		goto out;
	}
	if (!dst->dev) {
		rc = -ENODEV;
		goto out_rel;
	}

	ndev = dst->dev;
	if (is_vlan_dev(ndev)) {
		ini->vlan_id = vlan_dev_vlan_id(ndev);
		goto out_rel;
	}

	/* egress device itself is no vlan device - search the lower
	 * device hierarchy for one; the walk needs the rtnl lock
	 */
	priv.data = (void *)&ini->vlan_id;
	rtnl_lock();
	netdev_walk_all_lower_dev(ndev, smc_vlan_by_tcpsk_walk, &priv);
	rtnl_unlock();

out_rel:
	dst_release(dst);
out:
	return rc;
}
| 1830 | |
Karsten Graul | e49300a | 2021-10-16 11:37:46 +0200 | [diff] [blame] | 1831 | static bool smcr_lgr_match(struct smc_link_group *lgr, u8 smcr_version, |
| 1832 | u8 peer_systemid[], |
| 1833 | u8 peer_gid[], |
| 1834 | u8 peer_mac_v1[], |
Tony Lu | 0237a3a | 2021-12-28 21:06:09 +0800 | [diff] [blame] | 1835 | enum smc_lgr_role role, u32 clcqpn, |
| 1836 | struct net *net) |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 1837 | { |
Tony Lu | 0237a3a | 2021-12-28 21:06:09 +0800 | [diff] [blame] | 1838 | struct smc_link *lnk; |
Karsten Graul | b924754 | 2020-04-29 17:10:41 +0200 | [diff] [blame] | 1839 | int i; |
| 1840 | |
Karsten Graul | e49300a | 2021-10-16 11:37:46 +0200 | [diff] [blame] | 1841 | if (memcmp(lgr->peer_systemid, peer_systemid, SMC_SYSTEMID_LEN) || |
Karsten Graul | b924754 | 2020-04-29 17:10:41 +0200 | [diff] [blame] | 1842 | lgr->role != role) |
| 1843 | return false; |
| 1844 | |
| 1845 | for (i = 0; i < SMC_LINKS_PER_LGR_MAX; i++) { |
Tony Lu | 0237a3a | 2021-12-28 21:06:09 +0800 | [diff] [blame] | 1846 | lnk = &lgr->lnk[i]; |
| 1847 | |
| 1848 | if (!smc_link_active(lnk)) |
Karsten Graul | b924754 | 2020-04-29 17:10:41 +0200 | [diff] [blame] | 1849 | continue; |
Tony Lu | 0237a3a | 2021-12-28 21:06:09 +0800 | [diff] [blame] | 1850 | /* use verbs API to check netns, instead of lgr->net */ |
| 1851 | if (!rdma_dev_access_netns(lnk->smcibdev->ibdev, net)) |
| 1852 | return false; |
| 1853 | if ((lgr->role == SMC_SERV || lnk->peer_qpn == clcqpn) && |
| 1854 | !memcmp(lnk->peer_gid, peer_gid, SMC_GID_SIZE) && |
Karsten Graul | e49300a | 2021-10-16 11:37:46 +0200 | [diff] [blame] | 1855 | (smcr_version == SMC_V2 || |
Tony Lu | 0237a3a | 2021-12-28 21:06:09 +0800 | [diff] [blame] | 1856 | !memcmp(lnk->peer_mac, peer_mac_v1, ETH_ALEN))) |
Karsten Graul | b924754 | 2020-04-29 17:10:41 +0200 | [diff] [blame] | 1857 | return true; |
| 1858 | } |
| 1859 | return false; |
Hans Wippel | c6ba7c9 | 2018-06-28 19:05:07 +0200 | [diff] [blame] | 1860 | } |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 1861 | |
Hans Wippel | c6ba7c9 | 2018-06-28 19:05:07 +0200 | [diff] [blame] | 1862 | static bool smcd_lgr_match(struct smc_link_group *lgr, |
| 1863 | struct smcd_dev *smcismdev, u64 peer_gid) |
| 1864 | { |
| 1865 | return lgr->peer_gid == peer_gid && lgr->smcd == smcismdev; |
Ursula Braun | 0cfdd8f | 2017-01-09 16:55:17 +0100 | [diff] [blame] | 1866 | } |
| 1867 | |
/* create a new SMC connection (and a new link group if necessary).
 * Returns 0 on success with conn->lgr set and ini->first_contact_local
 * telling whether a new link group was created; returns a negative
 * errno or a positive SMC_CLC_DECL_* reason code on failure.
 */
int smc_conn_create(struct smc_sock *smc, struct smc_init_info *ini)
{
	struct smc_connection *conn = &smc->conn;
	struct net *net = sock_net(&smc->sk);
	struct list_head *lgr_list;
	struct smc_link_group *lgr;
	enum smc_lgr_role role;
	spinlock_t *lgr_lock;
	int rc = 0;

	/* SMC-D lgrs hang off the selected ISM device, SMC-R lgrs off
	 * the global smc_lgr_list
	 */
	lgr_list = ini->is_smcd ? &ini->ism_dev[ini->ism_selected]->lgr_list :
				  &smc_lgr_list.list;
	lgr_lock = ini->is_smcd ? &ini->ism_dev[ini->ism_selected]->lgr_lock :
				  &smc_lgr_list.lock;
	ini->first_contact_local = 1;
	role = smc->listen_smc ? SMC_SERV : SMC_CLNT;
	if (role == SMC_CLNT && ini->first_contact_peer)
		/* create new link group as well */
		goto create;

	/* determine if an existing link group can be reused */
	spin_lock_bh(lgr_lock);
	list_for_each_entry(lgr, lgr_list, list) {
		write_lock_bh(&lgr->conns_lock);
		/* reuse requires: protocol-specific peer match, no sync
		 * error, matching vlan (pre-V2 only), and - for an SMC-R
		 * server - room for another conn and a free rtoken slot
		 */
		if ((ini->is_smcd ?
		     smcd_lgr_match(lgr, ini->ism_dev[ini->ism_selected],
				    ini->ism_peer_gid[ini->ism_selected]) :
		     smcr_lgr_match(lgr, ini->smcr_version,
				    ini->peer_systemid,
				    ini->peer_gid, ini->peer_mac, role,
				    ini->ib_clcqpn, net)) &&
		    !lgr->sync_err &&
		    (ini->smcd_version == SMC_V2 ||
		     lgr->vlan_id == ini->vlan_id) &&
		    (role == SMC_CLNT || ini->is_smcd ||
		     (lgr->conns_num < lgr->max_conns &&
		      !bitmap_full(lgr->rtokens_used_mask, SMC_RMBS_PER_LGR_MAX)))) {
			/* link group found */
			ini->first_contact_local = 0;
			conn->lgr = lgr;
			rc = smc_lgr_register_conn(conn, false);
			write_unlock_bh(&lgr->conns_lock);
			/* reused lgr is in use again - stop pending free */
			if (!rc && delayed_work_pending(&lgr->free_work))
				cancel_delayed_work(&lgr->free_work);
			break;
		}
		write_unlock_bh(&lgr->conns_lock);
	}
	spin_unlock_bh(lgr_lock);
	if (rc)
		return rc;

	if (role == SMC_CLNT && !ini->first_contact_peer &&
	    ini->first_contact_local) {
		/* Server reuses a link group, but Client wants to start
		 * a new one
		 * send out_of_sync decline, reason synchr. error
		 */
		return SMC_CLC_DECL_SYNCERR;
	}

create:
	if (ini->first_contact_local) {
		rc = smc_lgr_create(smc, ini);
		if (rc)
			goto out;
		lgr = conn->lgr;
		write_lock_bh(&lgr->conns_lock);
		rc = smc_lgr_register_conn(conn, true);
		write_unlock_bh(&lgr->conns_lock);
		if (rc) {
			/* registering failed - tear the new lgr down again */
			smc_lgr_cleanup_early(lgr);
			goto out;
		}
	}
	smc_lgr_hold(conn->lgr); /* lgr_put in smc_conn_free() */
	if (!conn->lgr->is_smcd)
		smcr_link_hold(conn->lnk); /* link_put in smc_conn_free() */
	conn->freed = 0;
	conn->local_tx_ctrl.common.type = SMC_CDC_MSG_TYPE;
	conn->local_tx_ctrl.len = SMC_WR_TX_SIZE;
	conn->urg_state = SMC_URG_READ;
	init_waitqueue_head(&conn->cdc_pend_tx_wq);
	INIT_WORK(&smc->conn.abort_work, smc_conn_abort_work);
	if (ini->is_smcd) {
		/* SMC-D receive data starts behind the CDC message area */
		conn->rx_off = sizeof(struct smcd_cdc_msg);
		smcd_cdc_rx_init(conn); /* init tasklet for this conn */
	} else {
		conn->rx_off = 0;
	}
#ifndef KERNEL_HAS_ATOMIC64
	spin_lock_init(&conn->acurs_lock);
#endif

out:
	return rc;
}
Ursula Braun | cd6851f | 2017-01-09 16:55:18 +0100 | [diff] [blame] | 1966 | |
Stefan Raspl | 6716177 | 2021-08-09 10:10:14 +0200 | [diff] [blame] | 1967 | #define SMCD_DMBE_SIZES 6 /* 0 -> 16KB, 1 -> 32KB, .. 6 -> 1MB */ |
| 1968 | #define SMCR_RMBE_SIZES 5 /* 0 -> 16KB, 1 -> 32KB, .. 5 -> 512KB */ |
| 1969 | |
| 1970 | /* convert the RMB size into the compressed notation (minimum 16K, see |
| 1971 | * SMCD/R_DMBE_SIZES. |
Hans Wippel | 2f6beca | 2018-05-18 09:34:14 +0200 | [diff] [blame] | 1972 | * In contrast to plain ilog2, this rounds towards the next power of 2, |
| 1973 | * so the socket application gets at least its desired sndbuf / rcvbuf size. |
| 1974 | */ |
Stefan Raspl | 6716177 | 2021-08-09 10:10:14 +0200 | [diff] [blame] | 1975 | static u8 smc_compress_bufsize(int size, bool is_smcd, bool is_rmb) |
Hans Wippel | 2f6beca | 2018-05-18 09:34:14 +0200 | [diff] [blame] | 1976 | { |
Stefan Raspl | 6716177 | 2021-08-09 10:10:14 +0200 | [diff] [blame] | 1977 | const unsigned int max_scat = SG_MAX_SINGLE_ALLOC * PAGE_SIZE; |
Hans Wippel | 2f6beca | 2018-05-18 09:34:14 +0200 | [diff] [blame] | 1978 | u8 compressed; |
| 1979 | |
| 1980 | if (size <= SMC_BUF_MIN_SIZE) |
| 1981 | return 0; |
| 1982 | |
Stefan Raspl | 6716177 | 2021-08-09 10:10:14 +0200 | [diff] [blame] | 1983 | size = (size - 1) >> 14; /* convert to 16K multiple */ |
| 1984 | compressed = min_t(u8, ilog2(size) + 1, |
| 1985 | is_smcd ? SMCD_DMBE_SIZES : SMCR_RMBE_SIZES); |
| 1986 | |
| 1987 | if (!is_smcd && is_rmb) |
| 1988 | /* RMBs are backed by & limited to max size of scatterlists */ |
| 1989 | compressed = min_t(u8, compressed, ilog2(max_scat >> 14)); |
| 1990 | |
Hans Wippel | 2f6beca | 2018-05-18 09:34:14 +0200 | [diff] [blame] | 1991 | return compressed; |
| 1992 | } |
| 1993 | |
| 1994 | /* convert the RMB size from compressed notation into integer */ |
| 1995 | int smc_uncompress_bufsize(u8 compressed) |
| 1996 | { |
| 1997 | u32 size; |
| 1998 | |
| 1999 | size = 0x00000001 << (((int)compressed) + 14); |
| 2000 | return (int)size; |
| 2001 | } |
| 2002 | |
Ursula Braun | 3e03472 | 2017-07-28 13:56:20 +0200 | [diff] [blame] | 2003 | /* try to reuse a sndbuf or rmb description slot for a certain |
| 2004 | * buffer size; if not available, return NULL |
Ursula Braun | cd6851f | 2017-01-09 16:55:18 +0100 | [diff] [blame] | 2005 | */ |
Hans Wippel | 8437bda | 2018-05-18 09:34:16 +0200 | [diff] [blame] | 2006 | static struct smc_buf_desc *smc_buf_get_slot(int compressed_bufsize, |
D. Wythe | aff7bfe | 2023-02-02 16:26:42 +0800 | [diff] [blame] | 2007 | struct rw_semaphore *lock, |
Hans Wippel | 8437bda | 2018-05-18 09:34:16 +0200 | [diff] [blame] | 2008 | struct list_head *buf_list) |
Ursula Braun | cd6851f | 2017-01-09 16:55:18 +0100 | [diff] [blame] | 2009 | { |
Ursula Braun | 3e03472 | 2017-07-28 13:56:20 +0200 | [diff] [blame] | 2010 | struct smc_buf_desc *buf_slot; |
Ursula Braun | cd6851f | 2017-01-09 16:55:18 +0100 | [diff] [blame] | 2011 | |
D. Wythe | aff7bfe | 2023-02-02 16:26:42 +0800 | [diff] [blame] | 2012 | down_read(lock); |
Ursula Braun | 3e03472 | 2017-07-28 13:56:20 +0200 | [diff] [blame] | 2013 | list_for_each_entry(buf_slot, buf_list, list) { |
| 2014 | if (cmpxchg(&buf_slot->used, 0, 1) == 0) { |
D. Wythe | aff7bfe | 2023-02-02 16:26:42 +0800 | [diff] [blame] | 2015 | up_read(lock); |
Ursula Braun | 3e03472 | 2017-07-28 13:56:20 +0200 | [diff] [blame] | 2016 | return buf_slot; |
Ursula Braun | cd6851f | 2017-01-09 16:55:18 +0100 | [diff] [blame] | 2017 | } |
| 2018 | } |
D. Wythe | aff7bfe | 2023-02-02 16:26:42 +0800 | [diff] [blame] | 2019 | up_read(lock); |
Ursula Braun | cd6851f | 2017-01-09 16:55:18 +0100 | [diff] [blame] | 2020 | return NULL; |
| 2021 | } |
| 2022 | |
Ursula Braun | 952310c | 2017-01-09 16:55:24 +0100 | [diff] [blame] | 2023 | /* one of the conditions for announcing a receiver's current window size is |
| 2024 | * that it "results in a minimum increase in the window size of 10% of the |
| 2025 | * receive buffer space" [RFC7609] |
| 2026 | */ |
| 2027 | static inline int smc_rmb_wnd_update_limit(int rmbe_size) |
| 2028 | { |
Dust Li | 6bf536e | 2022-03-01 17:44:00 +0800 | [diff] [blame] | 2029 | return max_t(int, rmbe_size / 10, SOCK_MIN_SNDBUF / 2); |
Ursula Braun | 952310c | 2017-01-09 16:55:24 +0100 | [diff] [blame] | 2030 | } |
| 2031 | |
Wen Gu | b8d1994 | 2022-07-14 17:44:04 +0800 | [diff] [blame] | 2032 | /* map an buf to a link */ |
Karsten Graul | b924754 | 2020-04-29 17:10:41 +0200 | [diff] [blame] | 2033 | static int smcr_buf_map_link(struct smc_buf_desc *buf_desc, bool is_rmb, |
| 2034 | struct smc_link *lnk) |
| 2035 | { |
Wen Gu | b8d1994 | 2022-07-14 17:44:04 +0800 | [diff] [blame] | 2036 | int rc, i, nents, offset, buf_size, size, access_flags; |
| 2037 | struct scatterlist *sg; |
| 2038 | void *buf; |
Karsten Graul | b924754 | 2020-04-29 17:10:41 +0200 | [diff] [blame] | 2039 | |
| 2040 | if (buf_desc->is_map_ib[lnk->link_idx]) |
| 2041 | return 0; |
| 2042 | |
Wen Gu | b8d1994 | 2022-07-14 17:44:04 +0800 | [diff] [blame] | 2043 | if (buf_desc->is_vm) { |
| 2044 | buf = buf_desc->cpu_addr; |
| 2045 | buf_size = buf_desc->len; |
| 2046 | offset = offset_in_page(buf_desc->cpu_addr); |
| 2047 | nents = PAGE_ALIGN(buf_size + offset) / PAGE_SIZE; |
| 2048 | } else { |
| 2049 | nents = 1; |
| 2050 | } |
| 2051 | |
| 2052 | rc = sg_alloc_table(&buf_desc->sgt[lnk->link_idx], nents, GFP_KERNEL); |
Karsten Graul | b924754 | 2020-04-29 17:10:41 +0200 | [diff] [blame] | 2053 | if (rc) |
| 2054 | return rc; |
Wen Gu | b8d1994 | 2022-07-14 17:44:04 +0800 | [diff] [blame] | 2055 | |
| 2056 | if (buf_desc->is_vm) { |
| 2057 | /* virtually contiguous buffer */ |
| 2058 | for_each_sg(buf_desc->sgt[lnk->link_idx].sgl, sg, nents, i) { |
| 2059 | size = min_t(int, PAGE_SIZE - offset, buf_size); |
| 2060 | sg_set_page(sg, vmalloc_to_page(buf), size, offset); |
| 2061 | buf += size / sizeof(*buf); |
| 2062 | buf_size -= size; |
| 2063 | offset = 0; |
| 2064 | } |
| 2065 | } else { |
| 2066 | /* physically contiguous buffer */ |
| 2067 | sg_set_buf(buf_desc->sgt[lnk->link_idx].sgl, |
| 2068 | buf_desc->cpu_addr, buf_desc->len); |
| 2069 | } |
Karsten Graul | b924754 | 2020-04-29 17:10:41 +0200 | [diff] [blame] | 2070 | |
| 2071 | /* map sg table to DMA address */ |
| 2072 | rc = smc_ib_buf_map_sg(lnk, buf_desc, |
| 2073 | is_rmb ? DMA_FROM_DEVICE : DMA_TO_DEVICE); |
| 2074 | /* SMC protocol depends on mapping to one DMA address only */ |
Wen Gu | b8d1994 | 2022-07-14 17:44:04 +0800 | [diff] [blame] | 2075 | if (rc != nents) { |
Karsten Graul | b924754 | 2020-04-29 17:10:41 +0200 | [diff] [blame] | 2076 | rc = -EAGAIN; |
| 2077 | goto free_table; |
| 2078 | } |
| 2079 | |
Guangguan Wang | 0ef69e7 | 2022-07-14 17:44:01 +0800 | [diff] [blame] | 2080 | buf_desc->is_dma_need_sync |= |
| 2081 | smc_ib_is_sg_need_sync(lnk, buf_desc) << lnk->link_idx; |
| 2082 | |
Wen Gu | b8d1994 | 2022-07-14 17:44:04 +0800 | [diff] [blame] | 2083 | if (is_rmb || buf_desc->is_vm) { |
| 2084 | /* create a new memory region for the RMB or vzalloced sndbuf */ |
| 2085 | access_flags = is_rmb ? |
| 2086 | IB_ACCESS_REMOTE_WRITE | IB_ACCESS_LOCAL_WRITE : |
| 2087 | IB_ACCESS_LOCAL_WRITE; |
| 2088 | |
| 2089 | rc = smc_ib_get_memory_region(lnk->roce_pd, access_flags, |
Karsten Graul | b924754 | 2020-04-29 17:10:41 +0200 | [diff] [blame] | 2090 | buf_desc, lnk->link_idx); |
| 2091 | if (rc) |
| 2092 | goto buf_unmap; |
Wen Gu | b8d1994 | 2022-07-14 17:44:04 +0800 | [diff] [blame] | 2093 | smc_ib_sync_sg_for_device(lnk, buf_desc, |
| 2094 | is_rmb ? DMA_FROM_DEVICE : DMA_TO_DEVICE); |
Karsten Graul | b924754 | 2020-04-29 17:10:41 +0200 | [diff] [blame] | 2095 | } |
| 2096 | buf_desc->is_map_ib[lnk->link_idx] = true; |
| 2097 | return 0; |
| 2098 | |
| 2099 | buf_unmap: |
| 2100 | smc_ib_buf_unmap_sg(lnk, buf_desc, |
| 2101 | is_rmb ? DMA_FROM_DEVICE : DMA_TO_DEVICE); |
| 2102 | free_table: |
| 2103 | sg_free_table(&buf_desc->sgt[lnk->link_idx]); |
| 2104 | return rc; |
| 2105 | } |
| 2106 | |
Wen Gu | b8d1994 | 2022-07-14 17:44:04 +0800 | [diff] [blame] | 2107 | /* register a new buf on IB device, rmb or vzalloced sndbuf |
Karsten Graul | d550066 | 2020-05-01 12:48:05 +0200 | [diff] [blame] | 2108 | * must be called under lgr->llc_conf_mutex lock |
| 2109 | */ |
Wen Gu | b8d1994 | 2022-07-14 17:44:04 +0800 | [diff] [blame] | 2110 | int smcr_link_reg_buf(struct smc_link *link, struct smc_buf_desc *buf_desc) |
Karsten Graul | 7562a13 | 2020-05-01 12:48:01 +0200 | [diff] [blame] | 2111 | { |
| 2112 | if (list_empty(&link->lgr->list)) |
| 2113 | return -ENOLINK; |
Wen Gu | b8d1994 | 2022-07-14 17:44:04 +0800 | [diff] [blame] | 2114 | if (!buf_desc->is_reg_mr[link->link_idx]) { |
| 2115 | /* register memory region for new buf */ |
| 2116 | if (buf_desc->is_vm) |
| 2117 | buf_desc->mr[link->link_idx]->iova = |
| 2118 | (uintptr_t)buf_desc->cpu_addr; |
| 2119 | if (smc_wr_reg_send(link, buf_desc->mr[link->link_idx])) { |
| 2120 | buf_desc->is_reg_err = true; |
Karsten Graul | 7562a13 | 2020-05-01 12:48:01 +0200 | [diff] [blame] | 2121 | return -EFAULT; |
| 2122 | } |
Wen Gu | b8d1994 | 2022-07-14 17:44:04 +0800 | [diff] [blame] | 2123 | buf_desc->is_reg_mr[link->link_idx] = true; |
Karsten Graul | 7562a13 | 2020-05-01 12:48:01 +0200 | [diff] [blame] | 2124 | } |
| 2125 | return 0; |
| 2126 | } |
| 2127 | |
D. Wythe | aff7bfe | 2023-02-02 16:26:42 +0800 | [diff] [blame] | 2128 | static int _smcr_buf_map_lgr(struct smc_link *lnk, struct rw_semaphore *lock, |
Karsten Graul | fb33d277 | 2020-05-01 12:48:03 +0200 | [diff] [blame] | 2129 | struct list_head *lst, bool is_rmb) |
| 2130 | { |
| 2131 | struct smc_buf_desc *buf_desc, *bf; |
| 2132 | int rc = 0; |
| 2133 | |
D. Wythe | aff7bfe | 2023-02-02 16:26:42 +0800 | [diff] [blame] | 2134 | down_write(lock); |
Karsten Graul | fb33d277 | 2020-05-01 12:48:03 +0200 | [diff] [blame] | 2135 | list_for_each_entry_safe(buf_desc, bf, lst, list) { |
| 2136 | if (!buf_desc->used) |
| 2137 | continue; |
| 2138 | rc = smcr_buf_map_link(buf_desc, is_rmb, lnk); |
| 2139 | if (rc) |
| 2140 | goto out; |
| 2141 | } |
| 2142 | out: |
D. Wythe | aff7bfe | 2023-02-02 16:26:42 +0800 | [diff] [blame] | 2143 | up_write(lock); |
Karsten Graul | fb33d277 | 2020-05-01 12:48:03 +0200 | [diff] [blame] | 2144 | return rc; |
| 2145 | } |
| 2146 | |
| 2147 | /* map all used buffers of lgr for a new link */ |
| 2148 | int smcr_buf_map_lgr(struct smc_link *lnk) |
| 2149 | { |
| 2150 | struct smc_link_group *lgr = lnk->lgr; |
| 2151 | int i, rc = 0; |
| 2152 | |
| 2153 | for (i = 0; i < SMC_RMBE_SIZES; i++) { |
| 2154 | rc = _smcr_buf_map_lgr(lnk, &lgr->rmbs_lock, |
| 2155 | &lgr->rmbs[i], true); |
| 2156 | if (rc) |
| 2157 | return rc; |
| 2158 | rc = _smcr_buf_map_lgr(lnk, &lgr->sndbufs_lock, |
| 2159 | &lgr->sndbufs[i], false); |
| 2160 | if (rc) |
| 2161 | return rc; |
| 2162 | } |
| 2163 | return 0; |
| 2164 | } |
| 2165 | |
/* register all used buffers of lgr for a new link,
 * must be called under lgr->llc_conf_mutex lock
 */
int smcr_buf_reg_lgr(struct smc_link *lnk)
{
	struct smc_link_group *lgr = lnk->lgr;
	struct smc_buf_desc *buf_desc, *bf;
	int i, rc = 0;

	/* reg all RMBs for a new link */
	down_write(&lgr->rmbs_lock);
	for (i = 0; i < SMC_RMBE_SIZES; i++) {
		list_for_each_entry_safe(buf_desc, bf, &lgr->rmbs[i], list) {
			if (!buf_desc->used)
				continue;
			rc = smcr_link_reg_buf(lnk, buf_desc);
			if (rc) {
				/* bail out with the lock dropped */
				up_write(&lgr->rmbs_lock);
				return rc;
			}
		}
	}
	up_write(&lgr->rmbs_lock);

	/* only virtually contiguous sndbufs get registered below; with
	 * physically contiguous buffers there is nothing more to do
	 */
	if (lgr->buf_type == SMCR_PHYS_CONT_BUFS)
		return rc;

	/* reg all vzalloced sndbufs for a new link */
	down_write(&lgr->sndbufs_lock);
	for (i = 0; i < SMC_RMBE_SIZES; i++) {
		list_for_each_entry_safe(buf_desc, bf, &lgr->sndbufs[i], list) {
			if (!buf_desc->used || !buf_desc->is_vm)
				continue;
			rc = smcr_link_reg_buf(lnk, buf_desc);
			if (rc) {
				up_write(&lgr->sndbufs_lock);
				return rc;
			}
		}
	}
	up_write(&lgr->sndbufs_lock);
	return rc;
}
| 2209 | |
/* allocate a new buffer descriptor for an SMC-R link group; depending on
 * lgr->buf_type try physically contiguous pages first and/or fall back to
 * a virtually contiguous (vzalloc'ed) buffer.
 * Returns the new buf_desc, ERR_PTR(-ENOMEM) if the descriptor itself
 * cannot be allocated, or ERR_PTR(-EAGAIN) if only the data buffer
 * allocation failed (caller may retry with a smaller size).
 */
static struct smc_buf_desc *smcr_new_buf_create(struct smc_link_group *lgr,
						bool is_rmb, int bufsize)
{
	struct smc_buf_desc *buf_desc;

	/* try to alloc a new buffer */
	buf_desc = kzalloc(sizeof(*buf_desc), GFP_KERNEL);
	if (!buf_desc)
		return ERR_PTR(-ENOMEM);

	switch (lgr->buf_type) {
	case SMCR_PHYS_CONT_BUFS:
	case SMCR_MIXED_BUFS:
		buf_desc->order = get_order(bufsize);
		/* no retry, no memory reserves: failure here is recoverable
		 * by degrading to a smaller or virtually contiguous buffer
		 */
		buf_desc->pages = alloc_pages(GFP_KERNEL | __GFP_NOWARN |
					      __GFP_NOMEMALLOC | __GFP_COMP |
					      __GFP_NORETRY | __GFP_ZERO,
					      buf_desc->order);
		if (buf_desc->pages) {
			buf_desc->cpu_addr =
				(void *)page_address(buf_desc->pages);
			buf_desc->len = bufsize;
			buf_desc->is_vm = false;
			break;
		}
		if (lgr->buf_type == SMCR_PHYS_CONT_BUFS)
			goto out;
		fallthrough;	/* try virtually contiguous buf */
	case SMCR_VIRT_CONT_BUFS:
		buf_desc->order = get_order(bufsize);
		buf_desc->cpu_addr = vzalloc(PAGE_SIZE << buf_desc->order);
		if (!buf_desc->cpu_addr)
			goto out;
		buf_desc->pages = NULL;
		buf_desc->len = bufsize;
		buf_desc->is_vm = true;
		break;
	}
	return buf_desc;

out:
	kfree(buf_desc);
	return ERR_PTR(-EAGAIN);
}
| 2254 | |
Karsten Graul | b924754 | 2020-04-29 17:10:41 +0200 | [diff] [blame] | 2255 | /* map buf_desc on all usable links, |
| 2256 | * unused buffers stay mapped as long as the link is up |
| 2257 | */ |
| 2258 | static int smcr_buf_map_usable_links(struct smc_link_group *lgr, |
| 2259 | struct smc_buf_desc *buf_desc, bool is_rmb) |
| 2260 | { |
Wen Gu | e738455 | 2022-09-20 14:43:09 +0800 | [diff] [blame] | 2261 | int i, rc = 0, cnt = 0; |
Karsten Graul | b924754 | 2020-04-29 17:10:41 +0200 | [diff] [blame] | 2262 | |
Karsten Graul | d550066 | 2020-05-01 12:48:05 +0200 | [diff] [blame] | 2263 | /* protect against parallel link reconfiguration */ |
D. Wythe | f642101 | 2023-02-02 16:26:40 +0800 | [diff] [blame] | 2264 | down_read(&lgr->llc_conf_mutex); |
Karsten Graul | b924754 | 2020-04-29 17:10:41 +0200 | [diff] [blame] | 2265 | for (i = 0; i < SMC_LINKS_PER_LGR_MAX; i++) { |
| 2266 | struct smc_link *lnk = &lgr->lnk[i]; |
| 2267 | |
Karsten Graul | d854fcb | 2020-04-29 17:10:43 +0200 | [diff] [blame] | 2268 | if (!smc_link_usable(lnk)) |
Karsten Graul | b924754 | 2020-04-29 17:10:41 +0200 | [diff] [blame] | 2269 | continue; |
| 2270 | if (smcr_buf_map_link(buf_desc, is_rmb, lnk)) { |
Karsten Graul | b924754 | 2020-04-29 17:10:41 +0200 | [diff] [blame] | 2271 | rc = -ENOMEM; |
| 2272 | goto out; |
| 2273 | } |
Wen Gu | e738455 | 2022-09-20 14:43:09 +0800 | [diff] [blame] | 2274 | cnt++; |
Karsten Graul | b924754 | 2020-04-29 17:10:41 +0200 | [diff] [blame] | 2275 | } |
| 2276 | out: |
D. Wythe | f642101 | 2023-02-02 16:26:40 +0800 | [diff] [blame] | 2277 | up_read(&lgr->llc_conf_mutex); |
Wen Gu | e738455 | 2022-09-20 14:43:09 +0800 | [diff] [blame] | 2278 | if (!rc && !cnt) |
| 2279 | rc = -EINVAL; |
Karsten Graul | b924754 | 2020-04-29 17:10:41 +0200 | [diff] [blame] | 2280 | return rc; |
| 2281 | } |
| 2282 | |
Hans Wippel | c6ba7c9 | 2018-06-28 19:05:07 +0200 | [diff] [blame] | 2283 | static struct smc_buf_desc *smcd_new_buf_create(struct smc_link_group *lgr, |
| 2284 | bool is_dmb, int bufsize) |
| 2285 | { |
| 2286 | struct smc_buf_desc *buf_desc; |
| 2287 | int rc; |
| 2288 | |
Hans Wippel | c6ba7c9 | 2018-06-28 19:05:07 +0200 | [diff] [blame] | 2289 | /* try to alloc a new DMB */ |
| 2290 | buf_desc = kzalloc(sizeof(*buf_desc), GFP_KERNEL); |
| 2291 | if (!buf_desc) |
| 2292 | return ERR_PTR(-ENOMEM); |
| 2293 | if (is_dmb) { |
| 2294 | rc = smc_ism_register_dmb(lgr, bufsize, buf_desc); |
| 2295 | if (rc) { |
| 2296 | kfree(buf_desc); |
Karsten Graul | 96d6fde | 2020-10-23 20:48:29 +0200 | [diff] [blame] | 2297 | if (rc == -ENOMEM) |
| 2298 | return ERR_PTR(-EAGAIN); |
| 2299 | if (rc == -ENOSPC) |
| 2300 | return ERR_PTR(-ENOSPC); |
| 2301 | return ERR_PTR(-EIO); |
Hans Wippel | c6ba7c9 | 2018-06-28 19:05:07 +0200 | [diff] [blame] | 2302 | } |
Hans Wippel | be244f2 | 2018-06-28 19:05:10 +0200 | [diff] [blame] | 2303 | buf_desc->pages = virt_to_page(buf_desc->cpu_addr); |
| 2304 | /* CDC header stored in buf. So, pretend it was smaller */ |
| 2305 | buf_desc->len = bufsize - sizeof(struct smcd_cdc_msg); |
Hans Wippel | c6ba7c9 | 2018-06-28 19:05:07 +0200 | [diff] [blame] | 2306 | } else { |
| 2307 | buf_desc->cpu_addr = kzalloc(bufsize, GFP_KERNEL | |
| 2308 | __GFP_NOWARN | __GFP_NORETRY | |
| 2309 | __GFP_NOMEMALLOC); |
| 2310 | if (!buf_desc->cpu_addr) { |
| 2311 | kfree(buf_desc); |
| 2312 | return ERR_PTR(-EAGAIN); |
| 2313 | } |
| 2314 | buf_desc->len = bufsize; |
| 2315 | } |
| 2316 | return buf_desc; |
| 2317 | } |
| 2318 | |
/* create one send buffer (is_rmb == false) or one RMB (is_rmb == true) for
 * the connection: start from half the socket's snd/rcv buffer size and
 * step down through the compressed size slots until either a reusable slot
 * of the link group is found or a new buffer could be allocated.
 * Returns 0 on success, negative errno otherwise.
 */
static int __smc_buf_create(struct smc_sock *smc, bool is_smcd, bool is_rmb)
{
	struct smc_buf_desc *buf_desc = ERR_PTR(-ENOMEM);
	struct smc_connection *conn = &smc->conn;
	struct smc_link_group *lgr = conn->lgr;
	struct list_head *buf_list;
	int bufsize, bufsize_comp;
	struct rw_semaphore *lock; /* lock buffer list */
	bool is_dgraded = false;

	if (is_rmb)
		/* use socket recv buffer size (w/o overhead) as start value */
		bufsize = smc->sk.sk_rcvbuf / 2;
	else
		/* use socket send buffer size (w/o overhead) as start value */
		bufsize = smc->sk.sk_sndbuf / 2;

	/* downgrade loop: try the largest size first */
	for (bufsize_comp = smc_compress_bufsize(bufsize, is_smcd, is_rmb);
	     bufsize_comp >= 0; bufsize_comp--) {
		if (is_rmb) {
			lock = &lgr->rmbs_lock;
			buf_list = &lgr->rmbs[bufsize_comp];
		} else {
			lock = &lgr->sndbufs_lock;
			buf_list = &lgr->sndbufs[bufsize_comp];
		}
		bufsize = smc_uncompress_bufsize(bufsize_comp);

		/* check for reusable slot in the link group */
		buf_desc = smc_buf_get_slot(bufsize_comp, lock, buf_list);
		if (buf_desc) {
			buf_desc->is_dma_need_sync = 0;
			SMC_STAT_RMB_SIZE(smc, is_smcd, is_rmb, bufsize);
			SMC_STAT_BUF_REUSE(smc, is_smcd, is_rmb);
			break; /* found reusable slot */
		}

		if (is_smcd)
			buf_desc = smcd_new_buf_create(lgr, is_rmb, bufsize);
		else
			buf_desc = smcr_new_buf_create(lgr, is_rmb, bufsize);

		/* -ENOMEM: descriptor allocation itself failed, give up */
		if (PTR_ERR(buf_desc) == -ENOMEM)
			break;
		/* any other error: retry with the next smaller size */
		if (IS_ERR(buf_desc)) {
			if (!is_dgraded) {
				is_dgraded = true;
				SMC_STAT_RMB_DOWNGRADED(smc, is_smcd, is_rmb);
			}
			continue;
		}

		SMC_STAT_RMB_ALLOC(smc, is_smcd, is_rmb);
		SMC_STAT_RMB_SIZE(smc, is_smcd, is_rmb, bufsize);
		buf_desc->used = 1;
		/* publish the new buffer on the link group's list */
		down_write(lock);
		list_add(&buf_desc->list, buf_list);
		up_write(lock);
		break; /* found */
	}

	if (IS_ERR(buf_desc))
		return PTR_ERR(buf_desc);

	if (!is_smcd) {
		/* SMC-R: make the buffer known to all usable links */
		if (smcr_buf_map_usable_links(lgr, buf_desc, is_rmb)) {
			smcr_buf_unuse(buf_desc, is_rmb, lgr);
			return -ENOMEM;
		}
	}

	if (is_rmb) {
		conn->rmb_desc = buf_desc;
		conn->rmbe_size_comp = bufsize_comp;
		smc->sk.sk_rcvbuf = bufsize * 2;
		atomic_set(&conn->bytes_to_rcv, 0);
		conn->rmbe_update_limit =
			smc_rmb_wnd_update_limit(buf_desc->len);
		if (is_smcd)
			smc_ism_set_conn(conn); /* map RMB/smcd_dev to conn */
	} else {
		conn->sndbuf_desc = buf_desc;
		smc->sk.sk_sndbuf = bufsize * 2;
		atomic_set(&conn->sndbuf_space, bufsize);
	}
	return 0;
}
| 2406 | |
Ursula Braun | 10428dd | 2017-07-28 13:56:22 +0200 | [diff] [blame] | 2407 | void smc_sndbuf_sync_sg_for_device(struct smc_connection *conn) |
| 2408 | { |
Guangguan Wang | 0ef69e7 | 2022-07-14 17:44:01 +0800 | [diff] [blame] | 2409 | if (!conn->sndbuf_desc->is_dma_need_sync) |
| 2410 | return; |
Wen Gu | ea89c6c | 2022-01-13 16:36:41 +0800 | [diff] [blame] | 2411 | if (!smc_conn_lgr_valid(conn) || conn->lgr->is_smcd || |
| 2412 | !smc_link_active(conn->lnk)) |
Hans Wippel | c6ba7c9 | 2018-06-28 19:05:07 +0200 | [diff] [blame] | 2413 | return; |
Karsten Graul | 387707f | 2020-04-29 17:10:40 +0200 | [diff] [blame] | 2414 | smc_ib_sync_sg_for_device(conn->lnk, conn->sndbuf_desc, DMA_TO_DEVICE); |
Ursula Braun | 10428dd | 2017-07-28 13:56:22 +0200 | [diff] [blame] | 2415 | } |
| 2416 | |
| 2417 | void smc_rmb_sync_sg_for_cpu(struct smc_connection *conn) |
| 2418 | { |
Karsten Graul | b924754 | 2020-04-29 17:10:41 +0200 | [diff] [blame] | 2419 | int i; |
Ursula Braun | 10428dd | 2017-07-28 13:56:22 +0200 | [diff] [blame] | 2420 | |
Guangguan Wang | 0ef69e7 | 2022-07-14 17:44:01 +0800 | [diff] [blame] | 2421 | if (!conn->rmb_desc->is_dma_need_sync) |
| 2422 | return; |
Wen Gu | ea89c6c | 2022-01-13 16:36:41 +0800 | [diff] [blame] | 2423 | if (!smc_conn_lgr_valid(conn) || conn->lgr->is_smcd) |
Hans Wippel | c6ba7c9 | 2018-06-28 19:05:07 +0200 | [diff] [blame] | 2424 | return; |
Karsten Graul | b924754 | 2020-04-29 17:10:41 +0200 | [diff] [blame] | 2425 | for (i = 0; i < SMC_LINKS_PER_LGR_MAX; i++) { |
Karsten Graul | 741a49a | 2020-07-18 15:06:16 +0200 | [diff] [blame] | 2426 | if (!smc_link_active(&conn->lgr->lnk[i])) |
Karsten Graul | b924754 | 2020-04-29 17:10:41 +0200 | [diff] [blame] | 2427 | continue; |
| 2428 | smc_ib_sync_sg_for_cpu(&conn->lgr->lnk[i], conn->rmb_desc, |
| 2429 | DMA_FROM_DEVICE); |
| 2430 | } |
Ursula Braun | 10428dd | 2017-07-28 13:56:22 +0200 | [diff] [blame] | 2431 | } |
| 2432 | |
/* create the send and receive buffer for an SMC socket;
 * receive buffers are called RMBs;
 * (even though the SMC protocol allows more than one RMB-element per RMB,
 * the Linux implementation uses just one RMB-element per RMB, i.e. uses an
 * extra RMB for every connection in a link group
 */
int smc_buf_create(struct smc_sock *smc, bool is_smcd)
{
	int rc;

	/* create send buffer */
	rc = __smc_buf_create(smc, is_smcd, false);
	if (rc)
		return rc;
	/* create rmb */
	rc = __smc_buf_create(smc, is_smcd, true);
	if (rc) {
		/* RMB creation failed: unlink and free the sndbuf again */
		down_write(&smc->conn.lgr->sndbufs_lock);
		list_del(&smc->conn.sndbuf_desc->list);
		up_write(&smc->conn.lgr->sndbufs_lock);
		smc_buf_free(smc->conn.lgr, false, smc->conn.sndbuf_desc);
		/* clear the stale pointer so nobody frees it twice */
		smc->conn.sndbuf_desc = NULL;
	}
	return rc;
}
Ursula Braun | bd4ad57 | 2017-01-09 16:55:20 +0100 | [diff] [blame] | 2458 | |
| 2459 | static inline int smc_rmb_reserve_rtoken_idx(struct smc_link_group *lgr) |
| 2460 | { |
| 2461 | int i; |
| 2462 | |
| 2463 | for_each_clear_bit(i, lgr->rtokens_used_mask, SMC_RMBS_PER_LGR_MAX) { |
| 2464 | if (!test_and_set_bit(i, lgr->rtokens_used_mask)) |
| 2465 | return i; |
| 2466 | } |
| 2467 | return -ENOSPC; |
| 2468 | } |
| 2469 | |
Karsten Graul | ba21abd | 2020-04-30 15:55:47 +0200 | [diff] [blame] | 2470 | static int smc_rtoken_find_by_link(struct smc_link_group *lgr, int lnk_idx, |
| 2471 | u32 rkey) |
| 2472 | { |
| 2473 | int i; |
| 2474 | |
| 2475 | for (i = 0; i < SMC_RMBS_PER_LGR_MAX; i++) { |
| 2476 | if (test_bit(i, lgr->rtokens_used_mask) && |
| 2477 | lgr->rtokens[i][lnk_idx].rkey == rkey) |
| 2478 | return i; |
| 2479 | } |
| 2480 | return -ENOENT; |
| 2481 | } |
| 2482 | |
| 2483 | /* set rtoken for a new link to an existing rmb */ |
| 2484 | void smc_rtoken_set(struct smc_link_group *lgr, int link_idx, int link_idx_new, |
| 2485 | __be32 nw_rkey_known, __be64 nw_vaddr, __be32 nw_rkey) |
| 2486 | { |
| 2487 | int rtok_idx; |
| 2488 | |
| 2489 | rtok_idx = smc_rtoken_find_by_link(lgr, link_idx, ntohl(nw_rkey_known)); |
| 2490 | if (rtok_idx == -ENOENT) |
| 2491 | return; |
| 2492 | lgr->rtokens[rtok_idx][link_idx_new].rkey = ntohl(nw_rkey); |
| 2493 | lgr->rtokens[rtok_idx][link_idx_new].dma_addr = be64_to_cpu(nw_vaddr); |
| 2494 | } |
| 2495 | |
| 2496 | /* set rtoken for a new link whose link_id is given */ |
| 2497 | void smc_rtoken_set2(struct smc_link_group *lgr, int rtok_idx, int link_id, |
| 2498 | __be64 nw_vaddr, __be32 nw_rkey) |
| 2499 | { |
| 2500 | u64 dma_addr = be64_to_cpu(nw_vaddr); |
| 2501 | u32 rkey = ntohl(nw_rkey); |
| 2502 | bool found = false; |
| 2503 | int link_idx; |
| 2504 | |
| 2505 | for (link_idx = 0; link_idx < SMC_LINKS_PER_LGR_MAX; link_idx++) { |
| 2506 | if (lgr->lnk[link_idx].link_id == link_id) { |
| 2507 | found = true; |
| 2508 | break; |
| 2509 | } |
| 2510 | } |
| 2511 | if (!found) |
| 2512 | return; |
| 2513 | lgr->rtokens[rtok_idx][link_idx].rkey = rkey; |
| 2514 | lgr->rtokens[rtok_idx][link_idx].dma_addr = dma_addr; |
| 2515 | } |
| 2516 | |
Karsten Graul | 4ed75de | 2018-03-01 13:51:30 +0100 | [diff] [blame] | 2517 | /* add a new rtoken from peer */ |
Karsten Graul | 387707f | 2020-04-29 17:10:40 +0200 | [diff] [blame] | 2518 | int smc_rtoken_add(struct smc_link *lnk, __be64 nw_vaddr, __be32 nw_rkey) |
Ursula Braun | bd4ad57 | 2017-01-09 16:55:20 +0100 | [diff] [blame] | 2519 | { |
Karsten Graul | 387707f | 2020-04-29 17:10:40 +0200 | [diff] [blame] | 2520 | struct smc_link_group *lgr = smc_get_lgr(lnk); |
Karsten Graul | 4ed75de | 2018-03-01 13:51:30 +0100 | [diff] [blame] | 2521 | u64 dma_addr = be64_to_cpu(nw_vaddr); |
| 2522 | u32 rkey = ntohl(nw_rkey); |
Ursula Braun | bd4ad57 | 2017-01-09 16:55:20 +0100 | [diff] [blame] | 2523 | int i; |
| 2524 | |
| 2525 | for (i = 0; i < SMC_RMBS_PER_LGR_MAX; i++) { |
Karsten Graul | 387707f | 2020-04-29 17:10:40 +0200 | [diff] [blame] | 2526 | if (lgr->rtokens[i][lnk->link_idx].rkey == rkey && |
| 2527 | lgr->rtokens[i][lnk->link_idx].dma_addr == dma_addr && |
Ursula Braun | bd4ad57 | 2017-01-09 16:55:20 +0100 | [diff] [blame] | 2528 | test_bit(i, lgr->rtokens_used_mask)) { |
Karsten Graul | 4ed75de | 2018-03-01 13:51:30 +0100 | [diff] [blame] | 2529 | /* already in list */ |
| 2530 | return i; |
| 2531 | } |
| 2532 | } |
| 2533 | i = smc_rmb_reserve_rtoken_idx(lgr); |
| 2534 | if (i < 0) |
| 2535 | return i; |
Karsten Graul | 387707f | 2020-04-29 17:10:40 +0200 | [diff] [blame] | 2536 | lgr->rtokens[i][lnk->link_idx].rkey = rkey; |
| 2537 | lgr->rtokens[i][lnk->link_idx].dma_addr = dma_addr; |
Karsten Graul | 4ed75de | 2018-03-01 13:51:30 +0100 | [diff] [blame] | 2538 | return i; |
| 2539 | } |
| 2540 | |
Karsten Graul | e07d31d | 2020-04-29 17:10:42 +0200 | [diff] [blame] | 2541 | /* delete an rtoken from all links */ |
Karsten Graul | 387707f | 2020-04-29 17:10:40 +0200 | [diff] [blame] | 2542 | int smc_rtoken_delete(struct smc_link *lnk, __be32 nw_rkey) |
Karsten Graul | 4ed75de | 2018-03-01 13:51:30 +0100 | [diff] [blame] | 2543 | { |
Karsten Graul | 387707f | 2020-04-29 17:10:40 +0200 | [diff] [blame] | 2544 | struct smc_link_group *lgr = smc_get_lgr(lnk); |
Karsten Graul | 4ed75de | 2018-03-01 13:51:30 +0100 | [diff] [blame] | 2545 | u32 rkey = ntohl(nw_rkey); |
Karsten Graul | e07d31d | 2020-04-29 17:10:42 +0200 | [diff] [blame] | 2546 | int i, j; |
Karsten Graul | 4ed75de | 2018-03-01 13:51:30 +0100 | [diff] [blame] | 2547 | |
| 2548 | for (i = 0; i < SMC_RMBS_PER_LGR_MAX; i++) { |
Karsten Graul | 387707f | 2020-04-29 17:10:40 +0200 | [diff] [blame] | 2549 | if (lgr->rtokens[i][lnk->link_idx].rkey == rkey && |
Karsten Graul | 4ed75de | 2018-03-01 13:51:30 +0100 | [diff] [blame] | 2550 | test_bit(i, lgr->rtokens_used_mask)) { |
Karsten Graul | e07d31d | 2020-04-29 17:10:42 +0200 | [diff] [blame] | 2551 | for (j = 0; j < SMC_LINKS_PER_LGR_MAX; j++) { |
| 2552 | lgr->rtokens[i][j].rkey = 0; |
| 2553 | lgr->rtokens[i][j].dma_addr = 0; |
| 2554 | } |
Karsten Graul | 4ed75de | 2018-03-01 13:51:30 +0100 | [diff] [blame] | 2555 | clear_bit(i, lgr->rtokens_used_mask); |
Ursula Braun | bd4ad57 | 2017-01-09 16:55:20 +0100 | [diff] [blame] | 2556 | return 0; |
| 2557 | } |
| 2558 | } |
Karsten Graul | 4ed75de | 2018-03-01 13:51:30 +0100 | [diff] [blame] | 2559 | return -ENOENT; |
| 2560 | } |
| 2561 | |
| 2562 | /* save rkey and dma_addr received from peer during clc handshake */ |
| 2563 | int smc_rmb_rtoken_handling(struct smc_connection *conn, |
Karsten Graul | e07d31d | 2020-04-29 17:10:42 +0200 | [diff] [blame] | 2564 | struct smc_link *lnk, |
Karsten Graul | 4ed75de | 2018-03-01 13:51:30 +0100 | [diff] [blame] | 2565 | struct smc_clc_msg_accept_confirm *clc) |
| 2566 | { |
Ursula Braun | 3d9725a | 2020-09-10 18:48:23 +0200 | [diff] [blame] | 2567 | conn->rtoken_idx = smc_rtoken_add(lnk, clc->r0.rmb_dma_addr, |
| 2568 | clc->r0.rmb_rkey); |
Ursula Braun | bd4ad57 | 2017-01-09 16:55:20 +0100 | [diff] [blame] | 2569 | if (conn->rtoken_idx < 0) |
| 2570 | return conn->rtoken_idx; |
Ursula Braun | bd4ad57 | 2017-01-09 16:55:20 +0100 | [diff] [blame] | 2571 | return 0; |
| 2572 | } |
Hans Wippel | 9fda351 | 2018-05-18 09:34:11 +0200 | [diff] [blame] | 2573 | |
Ursula Braun | c3d9494 | 2019-10-09 10:07:46 +0200 | [diff] [blame] | 2574 | static void smc_core_going_away(void) |
| 2575 | { |
| 2576 | struct smc_ib_device *smcibdev; |
| 2577 | struct smcd_dev *smcd; |
| 2578 | |
Ursula Braun | 92f3cb0 | 2020-07-08 17:05:13 +0200 | [diff] [blame] | 2579 | mutex_lock(&smc_ib_devices.mutex); |
Ursula Braun | c3d9494 | 2019-10-09 10:07:46 +0200 | [diff] [blame] | 2580 | list_for_each_entry(smcibdev, &smc_ib_devices.list, list) { |
| 2581 | int i; |
| 2582 | |
| 2583 | for (i = 0; i < SMC_MAX_PORTS; i++) |
| 2584 | set_bit(i, smcibdev->ports_going_away); |
| 2585 | } |
Ursula Braun | 92f3cb0 | 2020-07-08 17:05:13 +0200 | [diff] [blame] | 2586 | mutex_unlock(&smc_ib_devices.mutex); |
Ursula Braun | c3d9494 | 2019-10-09 10:07:46 +0200 | [diff] [blame] | 2587 | |
Ursula Braun | 82087c0 | 2020-07-08 17:05:14 +0200 | [diff] [blame] | 2588 | mutex_lock(&smcd_dev_list.mutex); |
Ursula Braun | c3d9494 | 2019-10-09 10:07:46 +0200 | [diff] [blame] | 2589 | list_for_each_entry(smcd, &smcd_dev_list.list, list) { |
| 2590 | smcd->going_away = 1; |
| 2591 | } |
Ursula Braun | 82087c0 | 2020-07-08 17:05:14 +0200 | [diff] [blame] | 2592 | mutex_unlock(&smcd_dev_list.mutex); |
Ursula Braun | c3d9494 | 2019-10-09 10:07:46 +0200 | [diff] [blame] | 2593 | } |
| 2594 | |
/* Clean up all SMC link groups */
static void smc_lgrs_shutdown(void)
{
	struct smcd_dev *smcd;

	/* first mark all devices as going away */
	smc_core_going_away();

	/* terminate all SMC-R link groups */
	smc_smcr_terminate_all(NULL);

	/* terminate all SMC-D link groups, device by device */
	mutex_lock(&smcd_dev_list.mutex);
	list_for_each_entry(smcd, &smcd_dev_list.list, list)
		smc_smcd_terminate_all(smcd);
	mutex_unlock(&smcd_dev_list.mutex);
}
Ursula Braun | 5421ec2 | 2019-11-14 13:02:42 +0100 | [diff] [blame] | 2609 | |
/* reboot notifier callback: shut down all link groups and detach from
 * the IB and ISM layers before the system goes down
 */
static int smc_core_reboot_event(struct notifier_block *this,
				 unsigned long event, void *ptr)
{
	smc_lgrs_shutdown();
	smc_ib_unregister_client();
	smc_ism_exit();
	return 0;
}
| 2618 | |
/* invoke smc_core_reboot_event() on reboot/shutdown notifications */
static struct notifier_block smc_reboot_notifier = {
	.notifier_call = smc_core_reboot_event,
};
| 2622 | |
/* hook up the reboot notifier; returns 0 or a negative errno */
int __init smc_core_init(void)
{
	return register_reboot_notifier(&smc_reboot_notifier);
}
| 2627 | |
/* Called (from smc_exit) when module is removed */
void smc_core_exit(void)
{
	/* drop the notifier first so it cannot run during/after shutdown */
	unregister_reboot_notifier(&smc_reboot_notifier);
	smc_lgrs_shutdown();
}