Thomas Gleixner | 2874c5f | 2019-05-27 08:55:01 +0200 | [diff] [blame] | 1 | /* SPDX-License-Identifier: GPL-2.0-or-later */ |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 2 | /* |
| 3 | * INET An implementation of the TCP/IP protocol suite for the LINUX |
| 4 | * operating system. INET is implemented using the BSD Socket |
| 5 | * interface as the means of communication with the user level. |
| 6 | * |
| 7 | * Checksumming functions for IP, TCP, UDP and so on |
| 8 | * |
| 9 | * Authors: Jorge Cwik, <jorge@laser.satlink.net> |
| 10 | * Arnt Gulbrandsen, <agulbra@nvg.unit.no> |
| 11 | * Borrows very liberally from tcp.c and ip.c, see those |
| 12 | * files for more names. |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 13 | */ |
| 14 | |
| 15 | #ifndef _CHECKSUM_H |
| 16 | #define _CHECKSUM_H |
| 17 | |
| 18 | #include <linux/errno.h> |
| 19 | #include <asm/types.h> |
| 20 | #include <asm/byteorder.h> |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 21 | #include <asm/checksum.h> |
Jakub Kicinski | 68f4eae | 2023-01-25 23:14:16 -0800 | [diff] [blame] | 22 | #if !defined(_HAVE_ARCH_COPY_AND_CSUM_FROM_USER) || !defined(HAVE_CSUM_COPY_USER) |
| 23 | #include <linux/uaccess.h> |
| 24 | #endif |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 25 | |
| 26 | #ifndef _HAVE_ARCH_COPY_AND_CSUM_FROM_USER |
Christophe Leroy | 5486f5b | 2022-02-17 14:35:49 +0100 | [diff] [blame] | 27 | static __always_inline |
Al Viro | 56649d5 | 2006-11-14 21:23:59 -0800 | [diff] [blame] | 28 | __wsum csum_and_copy_from_user (const void __user *src, void *dst, |
Al Viro | c693cc4 | 2020-07-11 00:27:49 -0400 | [diff] [blame] | 29 | int len) |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 30 | { |
Al Viro | 5904122 | 2020-02-18 14:26:32 -0500 | [diff] [blame] | 31 | if (copy_from_user(dst, src, len)) |
Al Viro | c693cc4 | 2020-07-11 00:27:49 -0400 | [diff] [blame] | 32 | return 0; |
| 33 | return csum_partial(dst, len, ~0U); |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 34 | } |
| 35 | #endif |
| 36 | |
| 37 | #ifndef HAVE_CSUM_COPY_USER |
Christophe Leroy | 5486f5b | 2022-02-17 14:35:49 +0100 | [diff] [blame] | 38 | static __always_inline __wsum csum_and_copy_to_user |
Al Viro | c693cc4 | 2020-07-11 00:27:49 -0400 | [diff] [blame] | 39 | (const void *src, void __user *dst, int len) |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 40 | { |
Al Viro | c693cc4 | 2020-07-11 00:27:49 -0400 | [diff] [blame] | 41 | __wsum sum = csum_partial(src, len, ~0U); |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 42 | |
Al Viro | 001c1a6 | 2020-04-25 18:01:30 -0400 | [diff] [blame] | 43 | if (copy_to_user(dst, src, len) == 0) |
| 44 | return sum; |
Al Viro | c693cc4 | 2020-07-11 00:27:49 -0400 | [diff] [blame] | 45 | return 0; |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 46 | } |
| 47 | #endif |
| 48 | |
#ifndef _HAVE_ARCH_CSUM_AND_COPY
/*
 * Generic copy-and-checksum for kernel-space buffers: a plain memcpy()
 * followed by checksumming the destination.  Archs may override this
 * with a fused single-pass implementation.
 */
static __always_inline __wsum
csum_partial_copy_nocheck(const void *src, void *dst, int len)
{
	memcpy(dst, src, len);
	return csum_partial(dst, len, 0);
}
#endif
| 57 | |
#ifndef HAVE_ARCH_CSUM_ADD
/*
 * Add two 32-bit wide checksums using ones'-complement arithmetic:
 * any carry out of bit 31 is wrapped back into the low bits.
 */
static __always_inline __wsum csum_add(__wsum csum, __wsum addend)
{
	u32 res = (__force u32)csum;
	res += (__force u32)addend;
	/* (res < addend) is 1 exactly when the addition above carried */
	return (__force __wsum)(res + (res < (__force u32)addend));
}
#endif
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 66 | |
Christophe Leroy | 5486f5b | 2022-02-17 14:35:49 +0100 | [diff] [blame] | 67 | static __always_inline __wsum csum_sub(__wsum csum, __wsum addend) |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 68 | { |
| 69 | return csum_add(csum, ~addend); |
| 70 | } |
| 71 | |
/*
 * Ones'-complement addition on 16-bit quantities: any carry out of
 * bit 15 is wrapped back into the low bits.
 */
static __always_inline __sum16 csum16_add(__sum16 csum, __be16 addend)
{
	u16 res = (__force u16)csum;

	res += (__force u16)addend;
	/* (res < addend) is the end-around carry */
	return (__force __sum16)(res + (res < (__force u16)addend));
}
| 79 | |
Christophe Leroy | 5486f5b | 2022-02-17 14:35:49 +0100 | [diff] [blame] | 80 | static __always_inline __sum16 csum16_sub(__sum16 csum, __be16 addend) |
Eric Dumazet | 99f0b95 | 2014-03-23 19:51:36 -0700 | [diff] [blame] | 81 | { |
| 82 | return csum16_add(csum, ~addend); |
| 83 | } |
| 84 | |
#ifndef HAVE_ARCH_CSUM_SHIFT
/*
 * Prepare @sum for folding into a larger checksum at byte @offset.
 * Ones'-complement sums are position-independent at even byte offsets;
 * an odd offset swaps the byte lanes, which the 8-bit rotate accounts
 * for.
 */
static __always_inline __wsum csum_shift(__wsum sum, int offset)
{
	/* rotate sum to align it with a 16b boundary */
	if (offset & 1)
		return (__force __wsum)ror32((__force u32)sum, 8);
	return sum;
}
#endif
Al Viro | 594e450 | 2021-06-05 10:19:30 -0400 | [diff] [blame] | 94 | |
Christophe Leroy | 5486f5b | 2022-02-17 14:35:49 +0100 | [diff] [blame] | 95 | static __always_inline __wsum |
Al Viro | 56649d5 | 2006-11-14 21:23:59 -0800 | [diff] [blame] | 96 | csum_block_add(__wsum csum, __wsum csum2, int offset) |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 97 | { |
Al Viro | 594e450 | 2021-06-05 10:19:30 -0400 | [diff] [blame] | 98 | return csum_add(csum, csum_shift(csum2, offset)); |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 99 | } |
| 100 | |
/*
 * Same as csum_block_add(); the extra @len argument is accepted but
 * unused by this generic version.
 */
static __always_inline __wsum
csum_block_add_ext(__wsum csum, __wsum csum2, int offset, int len)
{
	return csum_block_add(csum, csum2, offset);
}
| 106 | |
Christophe Leroy | 5486f5b | 2022-02-17 14:35:49 +0100 | [diff] [blame] | 107 | static __always_inline __wsum |
Al Viro | 56649d5 | 2006-11-14 21:23:59 -0800 | [diff] [blame] | 108 | csum_block_sub(__wsum csum, __wsum csum2, int offset) |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 109 | { |
Alexander Duyck | 3380396 | 2016-03-09 09:25:26 -0800 | [diff] [blame] | 110 | return csum_block_add(csum, ~csum2, offset); |
Al Viro | 56649d5 | 2006-11-14 21:23:59 -0800 | [diff] [blame] | 111 | } |
| 112 | |
/*
 * Widen a folded 16-bit checksum to the 32-bit wide-sum type; the
 * value is zero-extended, a no-op in ones'-complement terms.
 */
static __always_inline __wsum csum_unfold(__sum16 n)
{
	return (__force __wsum)n;
}
| 117 | |
/* Thin wrapper around csum_partial() with an identical contract. */
static __always_inline
__wsum csum_partial_ext(const void *buff, int len, __wsum sum)
{
	return csum_partial(buff, len, sum);
}
| 123 | |
Al Viro | f6ab028 | 2006-11-16 02:36:50 -0800 | [diff] [blame] | 124 | #define CSUM_MANGLED_0 ((__force __sum16)0xffff) |
Patrick McHardy | a99a00c | 2007-11-30 01:14:30 +1100 | [diff] [blame] | 125 | |
/*
 * Apply a precomputed ones'-complement difference @diff to the folded
 * checksum stored at @sum.
 */
static __always_inline void csum_replace_by_diff(__sum16 *sum, __wsum diff)
{
	*sum = csum_fold(csum_add(diff, ~csum_unfold(*sum)));
}
| 130 | |
/*
 * Incrementally update the folded checksum at @sum when a 32-bit word
 * of the checksummed data changes from @from to @to.
 */
static __always_inline void csum_replace4(__sum16 *sum, __be32 from, __be32 to)
{
	/* ~HC + ~m: the unfolded complement sum with the old word removed */
	__wsum tmp = csum_sub(~csum_unfold(*sum), (__force __wsum)from);

	*sum = csum_fold(csum_add(tmp, (__force __wsum)to));
}
| 137 | |
Eric Dumazet | 99f0b95 | 2014-03-23 19:51:36 -0700 | [diff] [blame] | 138 | /* Implements RFC 1624 (Incremental Internet Checksum) |
| 139 | * 3. Discussion states : |
| 140 | * HC' = ~(~HC + ~m + m') |
| 141 | * m : old value of a 16bit field |
| 142 | * m' : new value of a 16bit field |
| 143 | */ |
static __always_inline void csum_replace2(__sum16 *sum, __be16 old, __be16 new)
{
	/* Direct transcription of HC' = ~(~HC + ~m + m') from RFC 1624,
	 * carried out in folded 16-bit ones'-complement arithmetic. */
	*sum = ~csum16_add(csum16_sub(~(*sum), old), new);
}
| 148 | |
Paul Blakey | d9b5ae5 | 2022-02-23 18:34:16 +0200 | [diff] [blame] | 149 | static inline void csum_replace(__wsum *csum, __wsum old, __wsum new) |
| 150 | { |
| 151 | *csum = csum_add(csum_sub(*csum, old), new); |
| 152 | } |
| 153 | |
Patrick McHardy | a99a00c | 2007-11-30 01:14:30 +1100 | [diff] [blame] | 154 | struct sk_buff; |
Joe Perches | 4fc7074 | 2013-07-31 17:31:38 -0700 | [diff] [blame] | 155 | void inet_proto_csum_replace4(__sum16 *sum, struct sk_buff *skb, |
Tom Herbert | 4b048d6 | 2015-08-17 13:42:25 -0700 | [diff] [blame] | 156 | __be32 from, __be32 to, bool pseudohdr); |
Joe Perches | 4fc7074 | 2013-07-31 17:31:38 -0700 | [diff] [blame] | 157 | void inet_proto_csum_replace16(__sum16 *sum, struct sk_buff *skb, |
| 158 | const __be32 *from, const __be32 *to, |
Tom Herbert | 4b048d6 | 2015-08-17 13:42:25 -0700 | [diff] [blame] | 159 | bool pseudohdr); |
Tom Herbert | abc5d1f | 2015-08-17 13:42:26 -0700 | [diff] [blame] | 160 | void inet_proto_csum_replace_by_diff(__sum16 *sum, struct sk_buff *skb, |
| 161 | __wsum diff, bool pseudohdr); |
Patrick McHardy | a99a00c | 2007-11-30 01:14:30 +1100 | [diff] [blame] | 162 | |
Christophe Leroy | 5486f5b | 2022-02-17 14:35:49 +0100 | [diff] [blame] | 163 | static __always_inline |
| 164 | void inet_proto_csum_replace2(__sum16 *sum, struct sk_buff *skb, |
| 165 | __be16 from, __be16 to, bool pseudohdr) |
Patrick McHardy | a99a00c | 2007-11-30 01:14:30 +1100 | [diff] [blame] | 166 | { |
| 167 | inet_proto_csum_replace4(sum, skb, (__force __be32)from, |
| 168 | (__force __be32)to, pseudohdr); |
| 169 | } |
| 170 | |
/*
 * Remote checksum offload helper: drop the contribution of the bytes
 * before @start from @csum, store the resulting folded checksum into
 * the packet at byte @offset, and return the ones'-complement delta
 * between the new and old field values so the caller can patch an
 * enclosing checksum.
 */
static __always_inline __wsum remcsum_adjust(void *ptr, __wsum csum,
					     int start, int offset)
{
	__sum16 *psum = (__sum16 *)(ptr + offset);
	__wsum delta;

	/* Subtract out checksum up to start */
	csum = csum_sub(csum, csum_partial(ptr, start, 0));

	/* Set derived checksum in packet */
	delta = csum_sub((__force __wsum)csum_fold(csum),
			 (__force __wsum)*psum);
	*psum = csum_fold(csum);

	return delta;
}
| 187 | |
/*
 * Undo a remcsum_adjust(): restore the checksum field at @psum using
 * the @delta that remcsum_adjust() returned.
 */
static __always_inline void remcsum_unadjust(__sum16 *psum, __wsum delta)
{
	*psum = csum_fold(csum_sub(delta, (__force __wsum)*psum));
}
| 192 | |
Christophe Leroy | 5486f5b | 2022-02-17 14:35:49 +0100 | [diff] [blame] | 193 | static __always_inline __wsum wsum_negate(__wsum val) |
Eric Dumazet | 45cac67 | 2021-12-03 20:53:56 -0800 | [diff] [blame] | 194 | { |
| 195 | return (__force __wsum)-((__force u32)val); |
| 196 | } |
Linus Torvalds | 1da177e | 2005-04-16 15:20:36 -0700 | [diff] [blame] | 197 | #endif |