Thomas Gleixner | 2874c5f | 2019-05-27 08:55:01 +0200 | [diff] [blame] | 1 | // SPDX-License-Identifier: GPL-2.0-or-later |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 2 | /* Management of Tx window, Tx resend, ACKs and out-of-sequence reception |
| 3 | * |
| 4 | * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved. |
| 5 | * Written by David Howells (dhowells@redhat.com) |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 6 | */ |
| 7 | |
Joe Perches | 9b6d539 | 2016-06-02 12:08:52 -0700 | [diff] [blame] | 8 | #define pr_fmt(fmt) KBUILD_MODNAME ": " fmt |
| 9 | |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 10 | #include <linux/module.h> |
| 11 | #include <linux/circ_buf.h> |
| 12 | #include <linux/net.h> |
| 13 | #include <linux/skbuff.h> |
Tejun Heo | 5a0e3ad | 2010-03-24 17:04:11 +0900 | [diff] [blame] | 14 | #include <linux/slab.h> |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 15 | #include <linux/udp.h> |
| 16 | #include <net/sock.h> |
| 17 | #include <net/af_rxrpc.h> |
| 18 | #include "ar-internal.h" |
| 19 | |
David Howells | 5873c08 | 2014-02-07 18:58:44 +0000 | [diff] [blame] | 20 | /* |
David Howells | a5af7e1 | 2016-10-06 08:11:49 +0100 | [diff] [blame] | 21 | * Propose a PING ACK be sent. |
| 22 | */ |
David Howells | 72f0c6f | 2020-01-30 21:48:13 +0000 | [diff] [blame] | 23 | void rxrpc_propose_ping(struct rxrpc_call *call, u32 serial, |
| 24 | enum rxrpc_propose_ack_trace why) |
David Howells | a5af7e1 | 2016-10-06 08:11:49 +0100 | [diff] [blame] | 25 | { |
David Howells | 72f0c6f | 2020-01-30 21:48:13 +0000 | [diff] [blame] | 26 | unsigned long now = jiffies; |
| 27 | unsigned long ping_at = now + rxrpc_idle_ack_delay; |
David Howells | a5af7e1 | 2016-10-06 08:11:49 +0100 | [diff] [blame] | 28 | |
David Howells | 72f0c6f | 2020-01-30 21:48:13 +0000 | [diff] [blame] | 29 | if (time_before(ping_at, call->ping_at)) { |
David Howells | 72f0c6f | 2020-01-30 21:48:13 +0000 | [diff] [blame] | 30 | WRITE_ONCE(call->ping_at, ping_at); |
| 31 | rxrpc_reduce_call_timer(call, ping_at, now, |
| 32 | rxrpc_timer_set_for_ping); |
David Howells | 530403d | 2020-01-30 21:48:14 +0000 | [diff] [blame] | 33 | trace_rxrpc_propose_ack(call, why, RXRPC_ACK_PING, serial); |
David Howells | a5af7e1 | 2016-10-06 08:11:49 +0100 | [diff] [blame] | 34 | } |
| 35 | } |
| 36 | |
| 37 | /* |
David Howells | 530403d | 2020-01-30 21:48:14 +0000 | [diff] [blame] | 38 | * Propose a DELAY ACK be sent in the future. |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 39 | */ |
David Howells | 4e76bd4 | 2022-05-06 16:13:13 +0100 | [diff] [blame] | 40 | void rxrpc_propose_delay_ACK(struct rxrpc_call *call, rxrpc_serial_t serial, |
| 41 | enum rxrpc_propose_ack_trace why) |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 42 | { |
David Howells | beb8e5e | 2017-11-24 10:18:41 +0000 | [diff] [blame] | 43 | unsigned long expiry = rxrpc_soft_ack_delay; |
David Howells | 72f0c6f | 2020-01-30 21:48:13 +0000 | [diff] [blame] | 44 | unsigned long now = jiffies, ack_at; |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 45 | |
David Howells | 530403d | 2020-01-30 21:48:14 +0000 | [diff] [blame] | 46 | call->ackr_serial = serial; |
David Howells | f2a676d | 2022-05-11 14:01:25 +0100 | [diff] [blame] | 47 | |
David Howells | 530403d | 2020-01-30 21:48:14 +0000 | [diff] [blame] | 48 | if (rxrpc_soft_ack_delay < expiry) |
| 49 | expiry = rxrpc_soft_ack_delay; |
David Howells | 72f0c6f | 2020-01-30 21:48:13 +0000 | [diff] [blame] | 50 | if (call->peer->srtt_us != 0) |
| 51 | ack_at = usecs_to_jiffies(call->peer->srtt_us >> 3); |
| 52 | else |
| 53 | ack_at = expiry; |
David Howells | beb8e5e | 2017-11-24 10:18:41 +0000 | [diff] [blame] | 54 | |
David Howells | 72f0c6f | 2020-01-30 21:48:13 +0000 | [diff] [blame] | 55 | ack_at += READ_ONCE(call->tx_backoff); |
| 56 | ack_at += now; |
David Howells | 530403d | 2020-01-30 21:48:14 +0000 | [diff] [blame] | 57 | if (time_before(ack_at, call->delay_ack_at)) { |
| 58 | WRITE_ONCE(call->delay_ack_at, ack_at); |
David Howells | 72f0c6f | 2020-01-30 21:48:13 +0000 | [diff] [blame] | 59 | rxrpc_reduce_call_timer(call, ack_at, now, |
| 60 | rxrpc_timer_set_for_ack); |
David Howells | 248f219 | 2016-09-08 11:10:12 +0100 | [diff] [blame] | 61 | } |
David Howells | 9c7ad43 | 2016-09-23 13:50:40 +0100 | [diff] [blame] | 62 | |
David Howells | 530403d | 2020-01-30 21:48:14 +0000 | [diff] [blame] | 63 | trace_rxrpc_propose_ack(call, why, RXRPC_ACK_DELAY, serial); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 64 | } |
| 65 | |
/*
 * Queue an ACK for immediate transmission.
 *
 * @call:       The call to send the ACK on
 * @ack_reason: RXRPC_ACK_* reason code placed in the ACK trailer
 * @serial:     Serial number of the packet being acked (0 if none)
 * @why:        Tracing tag recording why the ACK was proposed
 *
 * Allocates an ACK txbuf, fills in the wire header and ACK trailer fields,
 * and transmits it straight away.  Silently does nothing if the call has
 * already been disconnected or if the txbuf allocation fails (ACKs are
 * retransmittable state, so dropping one is safe).
 */
void rxrpc_send_ACK(struct rxrpc_call *call, u8 ack_reason,
		    rxrpc_serial_t serial, enum rxrpc_propose_ack_trace why)
{
	struct rxrpc_txbuf *txb;

	if (test_bit(RXRPC_CALL_DISCONNECTED, &call->flags))
		return;

	rxrpc_inc_stat(call->rxnet, stat_tx_acks[ack_reason]);

	/* May be called from softirq/RCU context: use GFP_ATOMIC when a
	 * sleeping allocation is not permitted.
	 */
	txb = rxrpc_alloc_txbuf(call, RXRPC_PACKET_TYPE_ACK,
				rcu_read_lock_held() ? GFP_ATOMIC | __GFP_NOWARN : GFP_NOFS);
	if (!txb) {
		kleave(" = -ENOMEM");
		return;
	}

	/* Fill in the wire header and ACK trailer.  firstPacket/nAcks etc.
	 * are left zero here; the transmitter fills in the ack window.
	 */
	txb->ack_why = why;
	txb->wire.seq = 0;
	txb->wire.type = RXRPC_PACKET_TYPE_ACK;
	txb->wire.flags |= RXRPC_SLOW_START_OK;
	txb->ack.bufferSpace = 0;
	txb->ack.maxSkew = 0;
	txb->ack.firstPacket = 0;
	txb->ack.previousPacket = 0;
	txb->ack.serial = htonl(serial);
	txb->ack.reason = ack_reason;
	txb->ack.nAcks = 0;

	trace_rxrpc_send_ack(call, why, ack_reason, serial);
	rxrpc_send_ack_packet(call, txb);
	/* Drop our ref; the packet transmission took its own. */
	rxrpc_put_txbuf(txb, rxrpc_txbuf_put_ack_tx);
}
| 102 | |
/*
 * Handle congestion being detected by the retransmit timeout.
 *
 * Just records the fact; the congestion-control code consults this flag
 * when it next processes ACKs.
 */
static void rxrpc_congestion_timeout(struct rxrpc_call *call)
{
	set_bit(RXRPC_CALL_RETRANS_TIMEOUT, &call->flags);
}
| 110 | |
| 111 | /* |
David Howells | 248f219 | 2016-09-08 11:10:12 +0100 | [diff] [blame] | 112 | * Perform retransmission of NAK'd and unack'd packets. |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 113 | */ |
David Howells | 5e6ef4f | 2020-01-23 13:13:41 +0000 | [diff] [blame] | 114 | void rxrpc_resend(struct rxrpc_call *call, struct sk_buff *ack_skb) |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 115 | { |
David Howells | d57a3a1 | 2022-05-07 10:06:13 +0100 | [diff] [blame] | 116 | struct rxrpc_ackpacket *ack = NULL; |
David Howells | a4ea4c4 | 2022-03-31 23:55:08 +0100 | [diff] [blame] | 117 | struct rxrpc_txbuf *txb; |
David Howells | 2c13c05 | 2022-01-21 23:12:58 +0000 | [diff] [blame] | 118 | unsigned long resend_at; |
David Howells | a4ea4c4 | 2022-03-31 23:55:08 +0100 | [diff] [blame] | 119 | rxrpc_seq_t transmitted = READ_ONCE(call->tx_transmitted); |
David Howells | c410bf01 | 2020-05-11 14:54:34 +0100 | [diff] [blame] | 120 | ktime_t now, max_age, oldest, ack_ts; |
David Howells | a4ea4c4 | 2022-03-31 23:55:08 +0100 | [diff] [blame] | 121 | bool unacked = false; |
David Howells | d57a3a1 | 2022-05-07 10:06:13 +0100 | [diff] [blame] | 122 | unsigned int i; |
David Howells | a4ea4c4 | 2022-03-31 23:55:08 +0100 | [diff] [blame] | 123 | LIST_HEAD(retrans_queue); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 124 | |
David Howells | a4ea4c4 | 2022-03-31 23:55:08 +0100 | [diff] [blame] | 125 | _enter("{%d,%d}", call->acks_hard_ack, call->tx_top); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 126 | |
David Howells | a158bdd | 2017-11-24 10:18:41 +0000 | [diff] [blame] | 127 | now = ktime_get_real(); |
David Howells | 214a9dc | 2022-04-05 13:34:09 +0100 | [diff] [blame] | 128 | max_age = ktime_sub_us(now, jiffies_to_usecs(call->peer->rto_j)); |
David Howells | d57a3a1 | 2022-05-07 10:06:13 +0100 | [diff] [blame] | 129 | oldest = now; |
| 130 | |
David Howells | d57a3a1 | 2022-05-07 10:06:13 +0100 | [diff] [blame] | 131 | if (list_empty(&call->tx_buffer)) |
| 132 | goto no_resend; |
David Howells | 50235c4 | 2016-09-22 00:29:31 +0100 | [diff] [blame] | 133 | |
David Howells | d57a3a1 | 2022-05-07 10:06:13 +0100 | [diff] [blame] | 134 | if (list_empty(&call->tx_buffer)) |
| 135 | goto no_further_resend; |
| 136 | |
David Howells | 5e6ef4f | 2020-01-23 13:13:41 +0000 | [diff] [blame] | 137 | trace_rxrpc_resend(call, ack_skb); |
David Howells | d57a3a1 | 2022-05-07 10:06:13 +0100 | [diff] [blame] | 138 | txb = list_first_entry(&call->tx_buffer, struct rxrpc_txbuf, call_link); |
David Howells | 248f219 | 2016-09-08 11:10:12 +0100 | [diff] [blame] | 139 | |
David Howells | d57a3a1 | 2022-05-07 10:06:13 +0100 | [diff] [blame] | 140 | /* Scan the soft ACK table without dropping the lock and resend any |
| 141 | * explicitly NAK'd packets. |
| 142 | */ |
David Howells | 5e6ef4f | 2020-01-23 13:13:41 +0000 | [diff] [blame] | 143 | if (ack_skb) { |
| 144 | ack = (void *)ack_skb->data + sizeof(struct rxrpc_wire_header); |
| 145 | |
David Howells | d57a3a1 | 2022-05-07 10:06:13 +0100 | [diff] [blame] | 146 | for (i = 0; i < ack->nAcks; i++) { |
| 147 | rxrpc_seq_t seq; |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 148 | |
David Howells | d57a3a1 | 2022-05-07 10:06:13 +0100 | [diff] [blame] | 149 | if (ack->acks[i] & 1) |
David Howells | 248f219 | 2016-09-08 11:10:12 +0100 | [diff] [blame] | 150 | continue; |
David Howells | d57a3a1 | 2022-05-07 10:06:13 +0100 | [diff] [blame] | 151 | seq = ntohl(ack->firstPacket) + i; |
| 152 | if (after(txb->seq, transmitted)) |
| 153 | break; |
| 154 | if (after(txb->seq, seq)) |
| 155 | continue; /* A new hard ACK probably came in */ |
| 156 | list_for_each_entry_from(txb, &call->tx_buffer, call_link) { |
| 157 | if (txb->seq == seq) |
| 158 | goto found_txb; |
David Howells | 248f219 | 2016-09-08 11:10:12 +0100 | [diff] [blame] | 159 | } |
David Howells | d57a3a1 | 2022-05-07 10:06:13 +0100 | [diff] [blame] | 160 | goto no_further_resend; |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 161 | |
David Howells | d57a3a1 | 2022-05-07 10:06:13 +0100 | [diff] [blame] | 162 | found_txb: |
| 163 | if (after(ntohl(txb->wire.serial), call->acks_highest_serial)) |
| 164 | continue; /* Ack point not yet reached */ |
| 165 | |
| 166 | rxrpc_see_txbuf(txb, rxrpc_txbuf_see_unacked); |
| 167 | |
| 168 | if (list_empty(&txb->tx_link)) { |
David Howells | d57a3a1 | 2022-05-07 10:06:13 +0100 | [diff] [blame] | 169 | list_add_tail(&txb->tx_link, &retrans_queue); |
| 170 | set_bit(RXRPC_TXBUF_RESENT, &txb->flags); |
| 171 | } |
| 172 | |
| 173 | trace_rxrpc_retransmit(call, txb->seq, |
| 174 | ktime_to_ns(ktime_sub(txb->last_sent, |
| 175 | max_age))); |
| 176 | |
| 177 | if (list_is_last(&txb->call_link, &call->tx_buffer)) |
| 178 | goto no_further_resend; |
| 179 | txb = list_next_entry(txb, call_link); |
| 180 | } |
David Howells | dfa7d92 | 2016-09-17 10:49:12 +0100 | [diff] [blame] | 181 | } |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 182 | |
David Howells | d57a3a1 | 2022-05-07 10:06:13 +0100 | [diff] [blame] | 183 | /* Fast-forward through the Tx queue to the point the peer says it has |
| 184 | * seen. Anything between the soft-ACK table and that point will get |
| 185 | * ACK'd or NACK'd in due course, so don't worry about it here; here we |
| 186 | * need to consider retransmitting anything beyond that point. |
| 187 | * |
| 188 | * Note that ACK for a packet can beat the update of tx_transmitted. |
| 189 | */ |
| 190 | if (after_eq(READ_ONCE(call->acks_prev_seq), READ_ONCE(call->tx_transmitted))) |
| 191 | goto no_further_resend; |
| 192 | |
| 193 | list_for_each_entry_from(txb, &call->tx_buffer, call_link) { |
| 194 | if (before_eq(txb->seq, READ_ONCE(call->acks_prev_seq))) |
| 195 | continue; |
| 196 | if (after(txb->seq, READ_ONCE(call->tx_transmitted))) |
| 197 | break; /* Not transmitted yet */ |
| 198 | |
| 199 | if (ack && ack->reason == RXRPC_ACK_PING_RESPONSE && |
| 200 | before(ntohl(txb->wire.serial), ntohl(ack->serial))) |
| 201 | goto do_resend; /* Wasn't accounted for by a more recent ping. */ |
| 202 | |
| 203 | if (ktime_after(txb->last_sent, max_age)) { |
| 204 | if (ktime_before(txb->last_sent, oldest)) |
| 205 | oldest = txb->last_sent; |
| 206 | continue; |
| 207 | } |
| 208 | |
| 209 | do_resend: |
| 210 | unacked = true; |
| 211 | if (list_empty(&txb->tx_link)) { |
David Howells | d57a3a1 | 2022-05-07 10:06:13 +0100 | [diff] [blame] | 212 | list_add_tail(&txb->tx_link, &retrans_queue); |
| 213 | set_bit(RXRPC_TXBUF_RESENT, &txb->flags); |
| 214 | rxrpc_inc_stat(call->rxnet, stat_tx_data_retrans); |
| 215 | } |
| 216 | } |
| 217 | |
| 218 | no_further_resend: |
David Howells | d57a3a1 | 2022-05-07 10:06:13 +0100 | [diff] [blame] | 219 | no_resend: |
Marc Dionne | 59299aa | 2018-03-30 21:04:44 +0100 | [diff] [blame] | 220 | resend_at = nsecs_to_jiffies(ktime_to_ns(ktime_sub(now, oldest))); |
David Howells | a4ea4c4 | 2022-03-31 23:55:08 +0100 | [diff] [blame] | 221 | resend_at += jiffies + rxrpc_get_rto_backoff(call->peer, |
| 222 | !list_empty(&retrans_queue)); |
David Howells | a158bdd | 2017-11-24 10:18:41 +0000 | [diff] [blame] | 223 | WRITE_ONCE(call->resend_at, resend_at); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 224 | |
David Howells | 5749434 | 2016-09-24 18:05:27 +0100 | [diff] [blame] | 225 | if (unacked) |
| 226 | rxrpc_congestion_timeout(call); |
| 227 | |
| 228 | /* If there was nothing that needed retransmission then it's likely |
| 229 | * that an ACK got lost somewhere. Send a ping to find out instead of |
| 230 | * retransmitting data. |
| 231 | */ |
David Howells | a4ea4c4 | 2022-03-31 23:55:08 +0100 | [diff] [blame] | 232 | if (list_empty(&retrans_queue)) { |
David Howells | 5e6ef4f | 2020-01-23 13:13:41 +0000 | [diff] [blame] | 233 | rxrpc_reduce_call_timer(call, resend_at, jiffies, |
David Howells | a158bdd | 2017-11-24 10:18:41 +0000 | [diff] [blame] | 234 | rxrpc_timer_set_for_resend); |
David Howells | 5749434 | 2016-09-24 18:05:27 +0100 | [diff] [blame] | 235 | ack_ts = ktime_sub(now, call->acks_latest_ts); |
David Howells | c410bf01 | 2020-05-11 14:54:34 +0100 | [diff] [blame] | 236 | if (ktime_to_us(ack_ts) < (call->peer->srtt_us >> 3)) |
David Howells | 5749434 | 2016-09-24 18:05:27 +0100 | [diff] [blame] | 237 | goto out; |
David Howells | 72f0c6f | 2020-01-30 21:48:13 +0000 | [diff] [blame] | 238 | rxrpc_send_ACK(call, RXRPC_ACK_PING, 0, |
| 239 | rxrpc_propose_ack_ping_for_lost_ack); |
David Howells | 5749434 | 2016-09-24 18:05:27 +0100 | [diff] [blame] | 240 | goto out; |
| 241 | } |
| 242 | |
David Howells | 5e6ef4f | 2020-01-23 13:13:41 +0000 | [diff] [blame] | 243 | /* Retransmit the queue */ |
David Howells | a4ea4c4 | 2022-03-31 23:55:08 +0100 | [diff] [blame] | 244 | while ((txb = list_first_entry_or_null(&retrans_queue, |
| 245 | struct rxrpc_txbuf, tx_link))) { |
| 246 | list_del_init(&txb->tx_link); |
David Howells | 5e6ef4f | 2020-01-23 13:13:41 +0000 | [diff] [blame] | 247 | rxrpc_transmit_one(call, txb); |
David Howells | dfa7d92 | 2016-09-17 10:49:12 +0100 | [diff] [blame] | 248 | } |
David Howells | 248f219 | 2016-09-08 11:10:12 +0100 | [diff] [blame] | 249 | |
David Howells | 5749434 | 2016-09-24 18:05:27 +0100 | [diff] [blame] | 250 | out: |
David Howells | 248f219 | 2016-09-08 11:10:12 +0100 | [diff] [blame] | 251 | _leave(""); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 252 | } |
| 253 | |
David Howells | cf37b59 | 2022-03-31 23:55:08 +0100 | [diff] [blame] | 254 | static bool rxrpc_tx_window_has_space(struct rxrpc_call *call) |
| 255 | { |
| 256 | unsigned int winsize = min_t(unsigned int, call->tx_winsize, |
| 257 | call->cong_cwnd + call->cong_extra); |
| 258 | rxrpc_seq_t window = call->acks_hard_ack, wtop = window + winsize; |
| 259 | rxrpc_seq_t tx_top = call->tx_top; |
| 260 | int space; |
| 261 | |
| 262 | space = wtop - tx_top; |
| 263 | return space > 0; |
| 264 | } |
| 265 | |
| 266 | /* |
| 267 | * Decant some if the sendmsg prepared queue into the transmission buffer. |
| 268 | */ |
| 269 | static void rxrpc_decant_prepared_tx(struct rxrpc_call *call) |
| 270 | { |
| 271 | struct rxrpc_txbuf *txb; |
| 272 | |
David Howells | a343b17 | 2022-10-12 22:17:56 +0100 | [diff] [blame] | 273 | if (!test_bit(RXRPC_CALL_EXPOSED, &call->flags)) { |
| 274 | if (list_empty(&call->tx_sendmsg)) |
| 275 | return; |
David Howells | cf37b59 | 2022-03-31 23:55:08 +0100 | [diff] [blame] | 276 | rxrpc_expose_client_call(call); |
David Howells | a343b17 | 2022-10-12 22:17:56 +0100 | [diff] [blame] | 277 | } |
David Howells | cf37b59 | 2022-03-31 23:55:08 +0100 | [diff] [blame] | 278 | |
| 279 | while ((txb = list_first_entry_or_null(&call->tx_sendmsg, |
| 280 | struct rxrpc_txbuf, call_link))) { |
| 281 | spin_lock(&call->tx_lock); |
| 282 | list_del(&txb->call_link); |
| 283 | spin_unlock(&call->tx_lock); |
| 284 | |
| 285 | call->tx_top = txb->seq; |
| 286 | list_add_tail(&txb->call_link, &call->tx_buffer); |
| 287 | |
| 288 | rxrpc_transmit_one(call, txb); |
| 289 | |
David Howells | cf37b59 | 2022-03-31 23:55:08 +0100 | [diff] [blame] | 290 | if (!rxrpc_tx_window_has_space(call)) |
| 291 | break; |
| 292 | } |
| 293 | } |
| 294 | |
| 295 | static void rxrpc_transmit_some_data(struct rxrpc_call *call) |
| 296 | { |
| 297 | switch (call->state) { |
| 298 | case RXRPC_CALL_SERVER_ACK_REQUEST: |
| 299 | if (list_empty(&call->tx_sendmsg)) |
| 300 | return; |
| 301 | fallthrough; |
| 302 | |
| 303 | case RXRPC_CALL_SERVER_SEND_REPLY: |
| 304 | case RXRPC_CALL_SERVER_AWAIT_ACK: |
| 305 | case RXRPC_CALL_CLIENT_SEND_REQUEST: |
| 306 | case RXRPC_CALL_CLIENT_AWAIT_REPLY: |
| 307 | if (!rxrpc_tx_window_has_space(call)) |
| 308 | return; |
David Howells | 32cf8ed | 2022-11-11 13:47:35 +0000 | [diff] [blame] | 309 | if (list_empty(&call->tx_sendmsg)) { |
| 310 | rxrpc_inc_stat(call->rxnet, stat_tx_data_underflow); |
David Howells | cf37b59 | 2022-03-31 23:55:08 +0100 | [diff] [blame] | 311 | return; |
David Howells | 32cf8ed | 2022-11-11 13:47:35 +0000 | [diff] [blame] | 312 | } |
David Howells | cf37b59 | 2022-03-31 23:55:08 +0100 | [diff] [blame] | 313 | rxrpc_decant_prepared_tx(call); |
| 314 | break; |
| 315 | default: |
| 316 | return; |
| 317 | } |
| 318 | } |
| 319 | |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 320 | /* |
David Howells | 5e6ef4f | 2020-01-23 13:13:41 +0000 | [diff] [blame] | 321 | * Ping the other end to fill our RTT cache and to retrieve the rwind |
| 322 | * and MTU parameters. |
| 323 | */ |
| 324 | static void rxrpc_send_initial_ping(struct rxrpc_call *call) |
| 325 | { |
| 326 | if (call->peer->rtt_count < 3 || |
| 327 | ktime_before(ktime_add_ms(call->peer->rtt_last_req, 1000), |
| 328 | ktime_get_real())) |
| 329 | rxrpc_send_ACK(call, RXRPC_ACK_PING, 0, |
| 330 | rxrpc_propose_ack_ping_for_params); |
| 331 | } |
| 332 | |
/*
 * Handle retransmission and deferred ACK/abort generation.
 *
 * @call: The call to process events for
 * @skb:  An optional packet just received for the call, or NULL if this is
 *        a timer/poke-driven pass
 *
 * This is the per-call event dispatcher run by the I/O thread: it checks
 * the lockless abort request, trips any expired call timers, feeds a
 * received packet into input processing, transmits pending data and emits
 * any ACKs/pings that became due, then rearms the call timer from the
 * earliest remaining deadline.  Returns true.
 */
bool rxrpc_input_call_event(struct rxrpc_call *call, struct sk_buff *skb)
{
	unsigned long now, next, t;
	rxrpc_serial_t ackr_serial;
	bool resend = false, expired = false;
	s32 abort_code;

	rxrpc_see_call(call, rxrpc_call_see_input);

	//printk("\n--------------------\n");
	_enter("{%d,%s,%lx}",
	       call->debug_id, rxrpc_call_states[call->state], call->events);

	if (call->state == RXRPC_CALL_COMPLETE)
		goto out;

	/* Handle abort request locklessly, vs rxrpc_propose_abort(). */
	abort_code = smp_load_acquire(&call->send_abort);
	if (abort_code) {
		rxrpc_abort_call(call, 0, call->send_abort, call->send_abort_err,
				 call->send_abort_why);
		goto out;
	}

	/* Error indications are handled elsewhere; nothing more to do here. */
	if (skb && skb->mark == RXRPC_SKB_MARK_ERROR)
		goto out;

	/* If we see our async-event poke, check for timeout trippage. */
	now = jiffies;
	t = READ_ONCE(call->expect_rx_by);
	if (time_after_eq(now, t)) {
		trace_rxrpc_timer(call, rxrpc_timer_exp_normal, now);
		expired = true;
	}

	/* The request-receive idle timeout only applies while the server is
	 * still receiving the request phase.
	 */
	t = READ_ONCE(call->expect_req_by);
	if (call->state == RXRPC_CALL_SERVER_RECV_REQUEST &&
	    time_after_eq(now, t)) {
		trace_rxrpc_timer(call, rxrpc_timer_exp_idle, now);
		expired = true;
	}

	t = READ_ONCE(call->expect_term_by);
	if (time_after_eq(now, t)) {
		trace_rxrpc_timer(call, rxrpc_timer_exp_hard, now);
		expired = true;
	}

	/* For each soft timer below: push the deadline effectively to
	 * infinity with cmpxchg (so a concurrent re-proposal wins) and act on
	 * the expiry.
	 */
	t = READ_ONCE(call->delay_ack_at);
	if (time_after_eq(now, t)) {
		trace_rxrpc_timer(call, rxrpc_timer_exp_ack, now);
		cmpxchg(&call->delay_ack_at, t, now + MAX_JIFFY_OFFSET);
		ackr_serial = xchg(&call->ackr_serial, 0);
		/* NOTE(review): the trace tag says ping_for_lost_ack but this
		 * sends a DELAY ACK - looks like a reused tag; confirm against
		 * the tracepoint definitions before relying on it.
		 */
		rxrpc_send_ACK(call, RXRPC_ACK_DELAY, ackr_serial,
			       rxrpc_propose_ack_ping_for_lost_ack);
	}

	t = READ_ONCE(call->ack_lost_at);
	if (time_after_eq(now, t)) {
		trace_rxrpc_timer(call, rxrpc_timer_exp_lost_ack, now);
		cmpxchg(&call->ack_lost_at, t, now + MAX_JIFFY_OFFSET);
		set_bit(RXRPC_CALL_EV_ACK_LOST, &call->events);
	}

	t = READ_ONCE(call->keepalive_at);
	if (time_after_eq(now, t)) {
		trace_rxrpc_timer(call, rxrpc_timer_exp_keepalive, now);
		cmpxchg(&call->keepalive_at, t, now + MAX_JIFFY_OFFSET);
		rxrpc_send_ACK(call, RXRPC_ACK_PING, 0,
			       rxrpc_propose_ack_ping_for_keepalive);
	}

	t = READ_ONCE(call->ping_at);
	if (time_after_eq(now, t)) {
		trace_rxrpc_timer(call, rxrpc_timer_exp_ping, now);
		cmpxchg(&call->ping_at, t, now + MAX_JIFFY_OFFSET);
		rxrpc_send_ACK(call, RXRPC_ACK_PING, 0,
			       rxrpc_propose_ack_ping_for_keepalive);
	}

	t = READ_ONCE(call->resend_at);
	if (time_after_eq(now, t)) {
		trace_rxrpc_timer(call, rxrpc_timer_exp_resend, now);
		cmpxchg(&call->resend_at, t, now + MAX_JIFFY_OFFSET);
		resend = true;
	}

	/* Process the received packet, if any, before transmitting. */
	if (skb)
		rxrpc_input_call_packet(call, skb);

	rxrpc_transmit_some_data(call);

	if (skb) {
		struct rxrpc_skb_priv *sp = rxrpc_skb(skb);

		/* An incoming ACK may mean the congestion state needs to
		 * decay.
		 */
		if (sp->hdr.type == RXRPC_PACKET_TYPE_ACK)
			rxrpc_congestion_degrade(call);
	}

	if (test_and_clear_bit(RXRPC_CALL_EV_INITIAL_PING, &call->events))
		rxrpc_send_initial_ping(call);

	/* Process events */
	if (expired) {
		/* If we've heard from the peer recently-enough by serial
		 * number, treat the expiry as a reset rather than a timeout.
		 */
		if (test_bit(RXRPC_CALL_RX_HEARD, &call->flags) &&
		    (int)call->conn->hi_serial - (int)call->rx_serial > 0) {
			trace_rxrpc_call_reset(call);
			rxrpc_abort_call(call, 0, RX_CALL_DEAD, -ECONNRESET,
					 rxrpc_abort_call_reset);
		} else {
			rxrpc_abort_call(call, 0, RX_CALL_TIMEOUT, -ETIME,
					 rxrpc_abort_call_timeout);
		}
		goto out;
	}

	if (test_and_clear_bit(RXRPC_CALL_EV_ACK_LOST, &call->events))
		rxrpc_send_ACK(call, RXRPC_ACK_PING, 0,
			       rxrpc_propose_ack_ping_for_lost_ack);

	/* Don't resend while the client is consuming the reply. */
	if (resend && call->state != RXRPC_CALL_CLIENT_RECV_REPLY)
		rxrpc_resend(call, NULL);

	if (test_and_clear_bit(RXRPC_CALL_RX_IS_IDLE, &call->flags))
		rxrpc_send_ACK(call, RXRPC_ACK_IDLE, 0,
			       rxrpc_propose_ack_rx_idle);

	/* Too many packets received without acking: send an IDLE ACK now. */
	if (atomic_read(&call->ackr_nr_unacked) > 2)
		rxrpc_send_ACK(call, RXRPC_ACK_IDLE, 0,
			       rxrpc_propose_ack_input_data);

	/* Make sure the timer is restarted */
	if (call->state != RXRPC_CALL_COMPLETE) {
		next = call->expect_rx_by;

#define set(T) { t = READ_ONCE(T); if (time_before(t, next)) next = t; }

		/* Fold every pending deadline into 'next' (the earliest). */
		set(call->expect_req_by);
		set(call->expect_term_by);
		set(call->delay_ack_at);
		set(call->ack_lost_at);
		set(call->resend_at);
		set(call->keepalive_at);
		set(call->ping_at);

		now = jiffies;
		if (time_after_eq(now, next))
			rxrpc_poke_call(call, rxrpc_call_poke_timer_now);

		rxrpc_reduce_call_timer(call, next, now, rxrpc_timer_restart);
	}

out:
	/* Completed calls are torn down here: stop the timer, disconnect and
	 * release crypto state.
	 */
	if (call->state == RXRPC_CALL_COMPLETE) {
		del_timer_sync(&call->timer);
		if (!test_bit(RXRPC_CALL_DISCONNECTED, &call->flags))
			rxrpc_disconnect_call(call);
		if (call->security)
			call->security->free_call_crypto(call);
	}
	if (call->acks_hard_ack != call->tx_bottom)
		rxrpc_shrink_call_tx_buffer(call);
	_leave("");
	return true;
}