// SPDX-License-Identifier: GPL-2.0-or-later
/* Kerberos-based RxRPC security
 *
 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 */

#define pr_fmt(fmt) KBUILD_MODNAME ": " fmt

#include <crypto/skcipher.h>
#include <linux/module.h>
#include <linux/net.h>
#include <linux/skbuff.h>
#include <linux/udp.h>
#include <linux/scatterlist.h>
#include <linux/ctype.h>
#include <linux/slab.h>
#include <linux/key-type.h>
#include <net/sock.h>
#include <net/af_rxrpc.h>
#include <keys/rxrpc-type.h>
#include "ar-internal.h"

#define RXKAD_VERSION 2
#define MAXKRB5TICKETLEN 1024
#define RXKAD_TKT_TYPE_KERBEROS_V5 256
#define ANAME_SZ 40	/* size of authentication name */
#define INST_SZ 40	/* size of principal's instance */
#define REALM_SZ 40	/* size of principal's auth domain */
#define SNAME_SZ 40	/* size of service name */
#define RXKAD_ALIGN 8

struct rxkad_level1_hdr {
	__be32	data_size;	/* true data size (excluding padding) */
};

struct rxkad_level2_hdr {
	__be32	data_size;	/* true data size (excluding padding) */
	__be32	checksum;	/* decrypted data checksum */
};

static int rxkad_prime_packet_security(struct rxrpc_connection *conn,
				       struct crypto_sync_skcipher *ci);

/*
 * this holds a pinned cipher so that keventd doesn't get called by the cipher
 * alloc routine, but since we have it to hand, we use it to decrypt RESPONSE
 * packets
 */
static struct crypto_sync_skcipher *rxkad_ci;
static struct skcipher_request *rxkad_ci_req;
static DEFINE_MUTEX(rxkad_ci_mutex);

/*
 * Parse the information from a server key
 *
 * The data should be the 8-byte secret key.
 */
static int rxkad_preparse_server_key(struct key_preparsed_payload *prep)
{
	struct crypto_skcipher *ci;

	if (prep->datalen != 8)
		return -EINVAL;

	memcpy(&prep->payload.data[2], prep->data, 8);

	ci = crypto_alloc_skcipher("pcbc(des)", 0, CRYPTO_ALG_ASYNC);
	if (IS_ERR(ci)) {
		_leave(" = %ld", PTR_ERR(ci));
		return PTR_ERR(ci);
	}

	if (crypto_skcipher_setkey(ci, prep->data, 8) < 0)
		BUG();

	prep->payload.data[0] = ci;
	_leave(" = 0");
	return 0;
}

static void rxkad_free_preparse_server_key(struct key_preparsed_payload *prep)
{
	if (prep->payload.data[0])
		crypto_free_skcipher(prep->payload.data[0]);
}

static void rxkad_destroy_server_key(struct key *key)
{
	if (key->payload.data[0]) {
		crypto_free_skcipher(key->payload.data[0]);
		key->payload.data[0] = NULL;
	}
}

/*
 * initialise connection security
 */
static int rxkad_init_connection_security(struct rxrpc_connection *conn,
					  struct rxrpc_key_token *token)
{
	struct crypto_sync_skcipher *ci;
	int ret;

	_enter("{%d},{%x}", conn->debug_id, key_serial(conn->key));

	conn->security_ix = token->security_index;

	ci = crypto_alloc_sync_skcipher("pcbc(fcrypt)", 0, 0);
	if (IS_ERR(ci)) {
		_debug("no cipher");
		ret = PTR_ERR(ci);
		goto error;
	}

	if (crypto_sync_skcipher_setkey(ci, token->kad->session_key,
					sizeof(token->kad->session_key)) < 0)
		BUG();

	switch (conn->security_level) {
	case RXRPC_SECURITY_PLAIN:
	case RXRPC_SECURITY_AUTH:
	case RXRPC_SECURITY_ENCRYPT:
		break;
	default:
		ret = -EKEYREJECTED;
		goto error;
	}

	ret = rxkad_prime_packet_security(conn, ci);
	if (ret < 0)
		goto error_ci;

	conn->rxkad.cipher = ci;
	return 0;

error_ci:
	crypto_free_sync_skcipher(ci);
error:
	_leave(" = %d", ret);
	return ret;
}

/*
 * Work out how much data we can put in a packet.
 */
static int rxkad_how_much_data(struct rxrpc_call *call, size_t remain,
			       size_t *_buf_size, size_t *_data_size, size_t *_offset)
{
	size_t shdr, buf_size, chunk;

	switch (call->conn->security_level) {
	default:
		buf_size = chunk = min_t(size_t, remain, RXRPC_JUMBO_DATALEN);
		shdr = 0;
		goto out;
	case RXRPC_SECURITY_AUTH:
		shdr = sizeof(struct rxkad_level1_hdr);
		break;
	case RXRPC_SECURITY_ENCRYPT:
		shdr = sizeof(struct rxkad_level2_hdr);
		break;
	}

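	/* The secured portion must be a whole number of 8-byte fcrypt blocks,
	 * so trim the buffer down and round any short final chunk back up.
	 * For example (illustrative arithmetic only), an 8-byte level-2
	 * header with 100 bytes of data remaining gives round_up(108, 8),
	 * i.e. a 112-byte buffer.
	 */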
	buf_size = round_down(RXRPC_JUMBO_DATALEN, RXKAD_ALIGN);

	chunk = buf_size - shdr;
	if (remain < chunk)
		buf_size = round_up(shdr + remain, RXKAD_ALIGN);

out:
	*_buf_size = buf_size;
	*_data_size = chunk;
	*_offset = shdr;
	return 0;
}

/*
 * prime the encryption state with the invariant parts of a connection's
 * description
 */
static int rxkad_prime_packet_security(struct rxrpc_connection *conn,
				       struct crypto_sync_skcipher *ci)
{
	struct skcipher_request *req;
	struct rxrpc_key_token *token;
	struct scatterlist sg;
	struct rxrpc_crypt iv;
	__be32 *tmpbuf;
	size_t tmpsize = 4 * sizeof(__be32);

	_enter("");

	if (!conn->key)
		return 0;

	tmpbuf = kmalloc(tmpsize, GFP_KERNEL);
	if (!tmpbuf)
		return -ENOMEM;

	req = skcipher_request_alloc(&ci->base, GFP_NOFS);
	if (!req) {
		kfree(tmpbuf);
		return -ENOMEM;
	}

	token = conn->key->payload.data[0];
	memcpy(&iv, token->kad->session_key, sizeof(iv));

	tmpbuf[0] = htonl(conn->proto.epoch);
	tmpbuf[1] = htonl(conn->proto.cid);
	tmpbuf[2] = 0;
	tmpbuf[3] = htonl(conn->security_ix);

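	/* Encrypt the { epoch, cid, 0, security_ix } block with the session
	 * key; the second 8-byte half of the ciphertext then becomes the
	 * per-connection checksum IV (copied out of tmpbuf + 2 below).
	 */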
	sg_init_one(&sg, tmpbuf, tmpsize);
	skcipher_request_set_sync_tfm(req, ci);
	skcipher_request_set_callback(req, 0, NULL, NULL);
	skcipher_request_set_crypt(req, &sg, &sg, tmpsize, iv.x);
	crypto_skcipher_encrypt(req);
	skcipher_request_free(req);

	memcpy(&conn->rxkad.csum_iv, tmpbuf + 2, sizeof(conn->rxkad.csum_iv));
	kfree(tmpbuf);
	_leave(" = 0");
	return 0;
}

/*
 * Allocate and prepare the crypto request on a call. For any particular call,
 * this is called serially for the packets, so no lock should be necessary.
 */
static struct skcipher_request *rxkad_get_call_crypto(struct rxrpc_call *call)
{
	struct crypto_skcipher *tfm = &call->conn->rxkad.cipher->base;

	return skcipher_request_alloc(tfm, GFP_NOFS);
}

/*
 * Clean up the crypto on a call.
 */
static void rxkad_free_call_crypto(struct rxrpc_call *call)
{
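	/* Nothing to do here: the skcipher request is allocated per packet by
	 * rxkad_get_call_crypto() and freed again by its callers.
	 */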
}

/*
 * partially encrypt a packet (level 1 security)
 */
static int rxkad_secure_packet_auth(const struct rxrpc_call *call,
				    struct rxrpc_txbuf *txb,
				    struct skcipher_request *req)
{
	struct rxkad_level1_hdr *hdr = (void *)txb->data;
	struct rxrpc_crypt iv;
	struct scatterlist sg;
	size_t pad;
	u16 check;

	_enter("");

	check = txb->seq ^ ntohl(txb->wire.callNumber);
	hdr->data_size = htonl((u32)check << 16 | txb->len);

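	/* Add the security header, then pad out to a whole number of 8-byte
	 * fcrypt blocks: pad = (8 - len) & 7, so (illustrative arithmetic
	 * only) a 13-byte secured length would get 3 bytes of zero padding.
	 */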
	txb->len += sizeof(struct rxkad_level1_hdr);
	pad = txb->len;
	pad = RXKAD_ALIGN - pad;
	pad &= RXKAD_ALIGN - 1;
	if (pad) {
		memset(txb->data + txb->offset, 0, pad);
		txb->len += pad;
	}

	/* start the encryption afresh */
	memset(&iv, 0, sizeof(iv));

	sg_init_one(&sg, txb->data, 8);
	skcipher_request_set_sync_tfm(req, call->conn->rxkad.cipher);
	skcipher_request_set_callback(req, 0, NULL, NULL);
	skcipher_request_set_crypt(req, &sg, &sg, 8, iv.x);
	crypto_skcipher_encrypt(req);
	skcipher_request_zero(req);

	_leave(" = 0");
	return 0;
}

/*
 * wholly encrypt a packet (level 2 security)
 */
static int rxkad_secure_packet_encrypt(const struct rxrpc_call *call,
				       struct rxrpc_txbuf *txb,
				       struct skcipher_request *req)
{
	const struct rxrpc_key_token *token;
	struct rxkad_level2_hdr *rxkhdr = (void *)txb->data;
	struct rxrpc_crypt iv;
	struct scatterlist sg;
	size_t pad;
	u16 check;
	int ret;

	_enter("");

	check = txb->seq ^ ntohl(txb->wire.callNumber);

	rxkhdr->data_size = htonl(txb->len | (u32)check << 16);
	rxkhdr->checksum = 0;

	txb->len += sizeof(struct rxkad_level2_hdr);
	pad = txb->len;
	pad = RXKAD_ALIGN - pad;
	pad &= RXKAD_ALIGN - 1;
	if (pad) {
		memset(txb->data + txb->offset, 0, pad);
		txb->len += pad;
	}

	/* encrypt from the session key */
	token = call->conn->key->payload.data[0];
	memcpy(&iv, token->kad->session_key, sizeof(iv));

	sg_init_one(&sg, txb->data, txb->len);
	skcipher_request_set_sync_tfm(req, call->conn->rxkad.cipher);
	skcipher_request_set_callback(req, 0, NULL, NULL);
	skcipher_request_set_crypt(req, &sg, &sg, txb->len, iv.x);
	ret = crypto_skcipher_encrypt(req);
	skcipher_request_zero(req);
	return ret;
}

/*
 * Checksum an RxRPC packet header and apply the appropriate level of security
 * to the packet payload.
 */
static int rxkad_secure_packet(struct rxrpc_call *call, struct rxrpc_txbuf *txb)
{
	struct skcipher_request *req;
	struct rxrpc_crypt iv;
	struct scatterlist sg;
	union {
		__be32 buf[2];
	} crypto __aligned(8);
	u32 x, y;
	int ret;

	_enter("{%d{%x}},{#%u},%u,",
	       call->debug_id, key_serial(call->conn->key),
	       txb->seq, txb->len);

	if (!call->conn->rxkad.cipher)
		return 0;

	ret = key_validate(call->conn->key);
	if (ret < 0)
		return ret;

	req = rxkad_get_call_crypto(call);
	if (!req)
		return -ENOMEM;

	/* continue encrypting from where we left off */
	memcpy(&iv, call->conn->rxkad.csum_iv.x, sizeof(iv));

	/* calculate the security checksum */
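	/* The checksum input packs the call number with the channel number
	 * (top two bits) and the low 30 bits of the sequence number; this
	 * 8-byte block is encrypted with the connection's checksum IV and the
	 * upper 16 bits of the second word become the wire checksum.
	 */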
	x = (ntohl(txb->wire.cid) & RXRPC_CHANNELMASK) << (32 - RXRPC_CIDSHIFT);
	x |= txb->seq & 0x3fffffff;
	crypto.buf[0] = txb->wire.callNumber;
	crypto.buf[1] = htonl(x);

	sg_init_one(&sg, crypto.buf, 8);
	skcipher_request_set_sync_tfm(req, call->conn->rxkad.cipher);
	skcipher_request_set_callback(req, 0, NULL, NULL);
	skcipher_request_set_crypt(req, &sg, &sg, 8, iv.x);
	crypto_skcipher_encrypt(req);
	skcipher_request_zero(req);

	y = ntohl(crypto.buf[1]);
	y = (y >> 16) & 0xffff;
	if (y == 0)
		y = 1; /* zero checksums are not permitted */
	txb->wire.cksum = htons(y);

	switch (call->conn->security_level) {
	case RXRPC_SECURITY_PLAIN:
		ret = 0;
		break;
	case RXRPC_SECURITY_AUTH:
		ret = rxkad_secure_packet_auth(call, txb, req);
		break;
	case RXRPC_SECURITY_ENCRYPT:
		ret = rxkad_secure_packet_encrypt(call, txb, req);
		break;
	default:
		ret = -EPERM;
		break;
	}

	skcipher_request_free(req);
	_leave(" = %d [set %x]", ret, y);
	return ret;
}

/*
 * decrypt partial encryption on a packet (level 1 security)
 */
static int rxkad_verify_packet_1(struct rxrpc_call *call, struct sk_buff *skb,
				 rxrpc_seq_t seq,
				 struct skcipher_request *req)
{
	struct rxkad_level1_hdr sechdr;
	struct rxrpc_skb_priv *sp = rxrpc_skb(skb);
	struct rxrpc_crypt iv;
	struct scatterlist sg[16];
	u32 data_size, buf;
	u16 check;
	int ret;

	_enter("");

	if (sp->len < 8)
		return rxrpc_abort_eproto(call, skb, RXKADSEALEDINCON,
					  rxkad_abort_1_short_header);

	/* Decrypt the skbuff in-place. TODO: We really want to decrypt
	 * directly into the target buffer.
	 */
	sg_init_table(sg, ARRAY_SIZE(sg));
	ret = skb_to_sgvec(skb, sg, sp->offset, 8);
	if (unlikely(ret < 0))
		return ret;

	/* start the decryption afresh */
	memset(&iv, 0, sizeof(iv));

	skcipher_request_set_sync_tfm(req, call->conn->rxkad.cipher);
	skcipher_request_set_callback(req, 0, NULL, NULL);
	skcipher_request_set_crypt(req, sg, sg, 8, iv.x);
	crypto_skcipher_decrypt(req);
	skcipher_request_zero(req);

	/* Extract the decrypted packet length */
	if (skb_copy_bits(skb, sp->offset, &sechdr, sizeof(sechdr)) < 0)
		return rxrpc_abort_eproto(call, skb, RXKADDATALEN,
					  rxkad_abort_1_short_encdata);
	sp->offset += sizeof(sechdr);
	sp->len    -= sizeof(sechdr);

	buf = ntohl(sechdr.data_size);
	data_size = buf & 0xffff;

	check = buf >> 16;
	check ^= seq ^ call->call_id;
	check &= 0xffff;
	if (check != 0)
		return rxrpc_abort_eproto(call, skb, RXKADSEALEDINCON,
					  rxkad_abort_1_short_check);
	if (data_size > sp->len)
		return rxrpc_abort_eproto(call, skb, RXKADDATALEN,
					  rxkad_abort_1_short_data);
	sp->len = data_size;

	_leave(" = 0 [dlen=%x]", data_size);
	return 0;
}

/*
 * wholly decrypt a packet (level 2 security)
 */
static int rxkad_verify_packet_2(struct rxrpc_call *call, struct sk_buff *skb,
				 rxrpc_seq_t seq,
				 struct skcipher_request *req)
{
	const struct rxrpc_key_token *token;
	struct rxkad_level2_hdr sechdr;
	struct rxrpc_skb_priv *sp = rxrpc_skb(skb);
	struct rxrpc_crypt iv;
	struct scatterlist _sg[4], *sg;
	u32 data_size, buf;
	u16 check;
	int nsg, ret;

	_enter(",{%d}", sp->len);

	if (sp->len < 8)
		return rxrpc_abort_eproto(call, skb, RXKADSEALEDINCON,
					  rxkad_abort_2_short_header);

	/* Decrypt the skbuff in-place. TODO: We really want to decrypt
	 * directly into the target buffer.
	 */
	sg = _sg;
	nsg = skb_shinfo(skb)->nr_frags + 1;
	if (nsg <= 4) {
		nsg = 4;
	} else {
		sg = kmalloc_array(nsg, sizeof(*sg), GFP_NOIO);
		if (!sg)
			return -ENOMEM;
	}

	sg_init_table(sg, nsg);
	ret = skb_to_sgvec(skb, sg, sp->offset, sp->len);
	if (unlikely(ret < 0)) {
		if (sg != _sg)
			kfree(sg);
		return ret;
	}

	/* decrypt from the session key */
	token = call->conn->key->payload.data[0];
	memcpy(&iv, token->kad->session_key, sizeof(iv));

	skcipher_request_set_sync_tfm(req, call->conn->rxkad.cipher);
	skcipher_request_set_callback(req, 0, NULL, NULL);
	skcipher_request_set_crypt(req, sg, sg, sp->len, iv.x);
	crypto_skcipher_decrypt(req);
	skcipher_request_zero(req);
	if (sg != _sg)
		kfree(sg);

	/* Extract the decrypted packet length */
	if (skb_copy_bits(skb, sp->offset, &sechdr, sizeof(sechdr)) < 0)
		return rxrpc_abort_eproto(call, skb, RXKADDATALEN,
					  rxkad_abort_2_short_len);
	sp->offset += sizeof(sechdr);
	sp->len    -= sizeof(sechdr);

	buf = ntohl(sechdr.data_size);
	data_size = buf & 0xffff;

	check = buf >> 16;
	check ^= seq ^ call->call_id;
	check &= 0xffff;
	if (check != 0)
		return rxrpc_abort_eproto(call, skb, RXKADSEALEDINCON,
					  rxkad_abort_2_short_check);

	if (data_size > sp->len)
		return rxrpc_abort_eproto(call, skb, RXKADDATALEN,
					  rxkad_abort_2_short_data);

	sp->len = data_size;
	_leave(" = 0 [dlen=%x]", data_size);
	return 0;
}

/*
 * Verify the security on a received packet and the subpackets therein.
 */
static int rxkad_verify_packet(struct rxrpc_call *call, struct sk_buff *skb)
{
	struct rxrpc_skb_priv *sp = rxrpc_skb(skb);
	struct skcipher_request *req;
	struct rxrpc_crypt iv;
	struct scatterlist sg;
	union {
		__be32 buf[2];
	} crypto __aligned(8);
	rxrpc_seq_t seq = sp->hdr.seq;
	int ret;
	u16 cksum;
	u32 x, y;

	_enter("{%d{%x}},{#%u}",
	       call->debug_id, key_serial(call->conn->key), seq);

	if (!call->conn->rxkad.cipher)
		return 0;

	req = rxkad_get_call_crypto(call);
	if (!req)
		return -ENOMEM;

	/* continue encrypting from where we left off */
	memcpy(&iv, call->conn->rxkad.csum_iv.x, sizeof(iv));

	/* validate the security checksum */
	x = (call->cid & RXRPC_CHANNELMASK) << (32 - RXRPC_CIDSHIFT);
	x |= seq & 0x3fffffff;
	crypto.buf[0] = htonl(call->call_id);
	crypto.buf[1] = htonl(x);

	sg_init_one(&sg, crypto.buf, 8);
	skcipher_request_set_sync_tfm(req, call->conn->rxkad.cipher);
	skcipher_request_set_callback(req, 0, NULL, NULL);
	skcipher_request_set_crypt(req, &sg, &sg, 8, iv.x);
	crypto_skcipher_encrypt(req);
	skcipher_request_zero(req);

	y = ntohl(crypto.buf[1]);
	cksum = (y >> 16) & 0xffff;
	if (cksum == 0)
		cksum = 1; /* zero checksums are not permitted */

	if (cksum != sp->hdr.cksum) {
		ret = rxrpc_abort_eproto(call, skb, RXKADSEALEDINCON,
					 rxkad_abort_bad_checksum);
		goto out;
	}

	switch (call->conn->security_level) {
	case RXRPC_SECURITY_PLAIN:
		ret = 0;
		break;
	case RXRPC_SECURITY_AUTH:
		ret = rxkad_verify_packet_1(call, skb, seq, req);
		break;
	case RXRPC_SECURITY_ENCRYPT:
		ret = rxkad_verify_packet_2(call, skb, seq, req);
		break;
	default:
		ret = -ENOANO;
		break;
	}

out:
	skcipher_request_free(req);
	return ret;
}

/*
 * issue a challenge
 */
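/* The issuer of a CHALLENGE picks a random nonce; the holder of the security
 * key must echo that nonce + 1 inside the encrypted part of its RESPONSE
 * (see rxkad_respond_to_challenge() below).
 */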
static int rxkad_issue_challenge(struct rxrpc_connection *conn)
{
	struct rxkad_challenge challenge;
	struct rxrpc_wire_header whdr;
	struct msghdr msg;
	struct kvec iov[2];
	size_t len;
	u32 serial;
	int ret;

	_enter("{%d}", conn->debug_id);

	get_random_bytes(&conn->rxkad.nonce, sizeof(conn->rxkad.nonce));

	challenge.version = htonl(2);
	challenge.nonce = htonl(conn->rxkad.nonce);
	challenge.min_level = htonl(0);
	challenge.__padding = 0;

	msg.msg_name = &conn->peer->srx.transport;
	msg.msg_namelen = conn->peer->srx.transport_len;
	msg.msg_control = NULL;
	msg.msg_controllen = 0;
	msg.msg_flags = 0;

	whdr.epoch = htonl(conn->proto.epoch);
	whdr.cid = htonl(conn->proto.cid);
	whdr.callNumber = 0;
	whdr.seq = 0;
	whdr.type = RXRPC_PACKET_TYPE_CHALLENGE;
	whdr.flags = conn->out_clientflag;
	whdr.userStatus = 0;
	whdr.securityIndex = conn->security_ix;
	whdr._rsvd = 0;
	whdr.serviceId = htons(conn->service_id);

	iov[0].iov_base = &whdr;
	iov[0].iov_len = sizeof(whdr);
	iov[1].iov_base = &challenge;
	iov[1].iov_len = sizeof(challenge);

	len = iov[0].iov_len + iov[1].iov_len;

	serial = atomic_inc_return(&conn->serial);
	whdr.serial = htonl(serial);

	ret = kernel_sendmsg(conn->local->socket, &msg, iov, 2, len);
	if (ret < 0) {
		trace_rxrpc_tx_fail(conn->debug_id, serial, ret,
				    rxrpc_tx_point_rxkad_challenge);
		return -EAGAIN;
	}

	conn->peer->last_tx_at = ktime_get_seconds();
	trace_rxrpc_tx_packet(conn->debug_id, &whdr,
			      rxrpc_tx_point_rxkad_challenge);
	_leave(" = 0");
	return 0;
}

/*
 * send a Kerberos security response
 */
static int rxkad_send_response(struct rxrpc_connection *conn,
			       struct rxrpc_host_header *hdr,
			       struct rxkad_response *resp,
			       const struct rxkad_key *s2)
{
	struct rxrpc_wire_header whdr;
	struct msghdr msg;
	struct kvec iov[3];
	size_t len;
	u32 serial;
	int ret;

	_enter("");

	msg.msg_name = &conn->peer->srx.transport;
	msg.msg_namelen = conn->peer->srx.transport_len;
	msg.msg_control = NULL;
	msg.msg_controllen = 0;
	msg.msg_flags = 0;

	memset(&whdr, 0, sizeof(whdr));
	whdr.epoch = htonl(hdr->epoch);
	whdr.cid = htonl(hdr->cid);
	whdr.type = RXRPC_PACKET_TYPE_RESPONSE;
	whdr.flags = conn->out_clientflag;
	whdr.securityIndex = hdr->securityIndex;
	whdr.serviceId = htons(hdr->serviceId);

	iov[0].iov_base = &whdr;
	iov[0].iov_len = sizeof(whdr);
	iov[1].iov_base = resp;
	iov[1].iov_len = sizeof(*resp);
	iov[2].iov_base = (void *)s2->ticket;
	iov[2].iov_len = s2->ticket_len;

	len = iov[0].iov_len + iov[1].iov_len + iov[2].iov_len;

	serial = atomic_inc_return(&conn->serial);
	whdr.serial = htonl(serial);

	ret = kernel_sendmsg(conn->local->socket, &msg, iov, 3, len);
	if (ret < 0) {
		trace_rxrpc_tx_fail(conn->debug_id, serial, ret,
				    rxrpc_tx_point_rxkad_response);
		return -EAGAIN;
	}

	conn->peer->last_tx_at = ktime_get_seconds();
	_leave(" = 0");
	return 0;
}

/*
 * calculate the response checksum
 */
static void rxkad_calc_response_checksum(struct rxkad_response *response)
{
	u32 csum = 1000003;
	int loop;
	u8 *p = (u8 *) response;

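	/* Simple multiplicative hash over every byte of the response
	 * structure, seeded at 1000003 with multiplier 0x10204081; the result
	 * is stored in the encrypted section's checksum field.
	 */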
| 748 | for (loop = sizeof(*response); loop > 0; loop--) |
| 749 | csum = csum * 0x10204081 + *p++; |
| 750 | |
| 751 | response->encrypted.checksum = htonl(csum); |
| 752 | } |
| 753 | |
| 754 | /* |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 755 | * encrypt the response packet |
| 756 | */ |
David Howells | 1db88c5 | 2019-07-30 15:56:57 +0100 | [diff] [blame] | 757 | static int rxkad_encrypt_response(struct rxrpc_connection *conn, |
| 758 | struct rxkad_response *resp, |
| 759 | const struct rxkad_key *s2) |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 760 | { |
David Howells | 1db88c5 | 2019-07-30 15:56:57 +0100 | [diff] [blame] | 761 | struct skcipher_request *req; |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 762 | struct rxrpc_crypt iv; |
Herbert Xu | a263629 | 2016-06-26 14:55:24 -0700 | [diff] [blame] | 763 | struct scatterlist sg[1]; |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 764 | |
David Howells | 521bb30 | 2020-09-22 13:36:17 +0100 | [diff] [blame] | 765 | req = skcipher_request_alloc(&conn->rxkad.cipher->base, GFP_NOFS); |
David Howells | 1db88c5 | 2019-07-30 15:56:57 +0100 | [diff] [blame] | 766 | if (!req) |
| 767 | return -ENOMEM; |
| 768 | |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 769 | /* continue encrypting from where we left off */ |
| 770 | memcpy(&iv, s2->session_key, sizeof(iv)); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 771 | |
Herbert Xu | a263629 | 2016-06-26 14:55:24 -0700 | [diff] [blame] | 772 | sg_init_table(sg, 1); |
| 773 | sg_set_buf(sg, &resp->encrypted, sizeof(resp->encrypted)); |
David Howells | 521bb30 | 2020-09-22 13:36:17 +0100 | [diff] [blame] | 774 | skcipher_request_set_sync_tfm(req, conn->rxkad.cipher); |
Herbert Xu | 1afe593 | 2016-01-24 21:19:01 +0800 | [diff] [blame] | 775 | skcipher_request_set_callback(req, 0, NULL, NULL); |
| 776 | skcipher_request_set_crypt(req, sg, sg, sizeof(resp->encrypted), iv.x); |
Herbert Xu | 1afe593 | 2016-01-24 21:19:01 +0800 | [diff] [blame] | 777 | crypto_skcipher_encrypt(req); |
David Howells | 1db88c5 | 2019-07-30 15:56:57 +0100 | [diff] [blame] | 778 | skcipher_request_free(req); |
| 779 | return 0; |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 780 | } |
| 781 | |
| 782 | /* |
| 783 | * respond to a challenge packet |
| 784 | */ |
| 785 | static int rxkad_respond_to_challenge(struct rxrpc_connection *conn, |
David Howells | a00ce28 | 2022-10-20 09:56:36 +0100 | [diff] [blame] | 786 | struct sk_buff *skb) |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 787 | { |
David Howells | 3394128 | 2009-09-14 01:17:35 +0000 | [diff] [blame] | 788 | const struct rxrpc_key_token *token; |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 789 | struct rxkad_challenge challenge; |
David Howells | 8c2f826 | 2018-02-08 15:59:07 +0000 | [diff] [blame] | 790 | struct rxkad_response *resp; |
David Howells | 248f219 | 2016-09-08 11:10:12 +0100 | [diff] [blame] | 791 | struct rxrpc_skb_priv *sp = rxrpc_skb(skb); |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 792 | u32 version, nonce, min_level; |
| 793 | int ret = -EPROTO; |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 794 | |
David Howells | 2cc8008 | 2022-10-19 13:49:02 +0100 | [diff] [blame] | 795 | _enter("{%d,%x}", conn->debug_id, key_serial(conn->key)); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 796 | |
David Howells | 2cc8008 | 2022-10-19 13:49:02 +0100 | [diff] [blame] | 797 | if (!conn->key) |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 798 | return rxrpc_abort_conn(conn, skb, RX_PROTOCOL_ERROR, -EPROTO, |
| 799 | rxkad_abort_chall_no_key); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 800 | |
David Howells | 2cc8008 | 2022-10-19 13:49:02 +0100 | [diff] [blame] | 801 | ret = key_validate(conn->key); |
David Howells | ef68622 | 2017-04-06 10:11:59 +0100 | [diff] [blame] | 802 | if (ret < 0) |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 803 | return rxrpc_abort_conn(conn, skb, RXKADEXPIRED, ret, |
| 804 | rxkad_abort_chall_key_expired); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 805 | |
David Howells | 775e5b7 | 2016-09-30 13:26:03 +0100 | [diff] [blame] | 806 | if (skb_copy_bits(skb, sizeof(struct rxrpc_wire_header), |
| 807 | &challenge, sizeof(challenge)) < 0) |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 808 | return rxrpc_abort_conn(conn, skb, RXKADPACKETSHORT, -EPROTO, |
| 809 | rxkad_abort_chall_short); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 810 | |
| 811 | version = ntohl(challenge.version); |
| 812 | nonce = ntohl(challenge.nonce); |
| 813 | min_level = ntohl(challenge.min_level); |
| 814 | |
David Howells | 2ebdb26 | 2022-10-20 11:51:06 +0100 | [diff] [blame] | 815 | trace_rxrpc_rx_challenge(conn, sp->hdr.serial, version, nonce, min_level); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 816 | |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 817 | if (version != RXKAD_VERSION) |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 818 | return rxrpc_abort_conn(conn, skb, RXKADINCONSISTENCY, -EPROTO, |
| 819 | rxkad_abort_chall_version); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 820 | |
David Howells | 2cc8008 | 2022-10-19 13:49:02 +0100 | [diff] [blame] | 821 | if (conn->security_level < min_level) |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 822 | return rxrpc_abort_conn(conn, skb, RXKADLEVELFAIL, -EACCES, |
| 823 | rxkad_abort_chall_level); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 824 | |
David Howells | 2cc8008 | 2022-10-19 13:49:02 +0100 | [diff] [blame] | 825 | token = conn->key->payload.data[0]; |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 826 | |
| 827 | /* build the response packet */ |
David Howells | 8c2f826 | 2018-02-08 15:59:07 +0000 | [diff] [blame] | 828 | resp = kzalloc(sizeof(struct rxkad_response), GFP_NOFS); |
| 829 | if (!resp) |
| 830 | return -ENOMEM; |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 831 | |
David Howells | 8c2f826 | 2018-02-08 15:59:07 +0000 | [diff] [blame] | 832 | resp->version = htonl(RXKAD_VERSION); |
| 833 | resp->encrypted.epoch = htonl(conn->proto.epoch); |
| 834 | resp->encrypted.cid = htonl(conn->proto.cid); |
| 835 | resp->encrypted.securityIndex = htonl(conn->security_ix); |
| 836 | resp->encrypted.inc_nonce = htonl(nonce + 1); |
David Howells | 2cc8008 | 2022-10-19 13:49:02 +0100 | [diff] [blame] | 837 | resp->encrypted.level = htonl(conn->security_level); |
David Howells | 8c2f826 | 2018-02-08 15:59:07 +0000 | [diff] [blame] | 838 | resp->kvno = htonl(token->kad->kvno); |
| 839 | resp->ticket_len = htonl(token->kad->ticket_len); |
| 840 | resp->encrypted.call_id[0] = htonl(conn->channels[0].call_counter); |
| 841 | resp->encrypted.call_id[1] = htonl(conn->channels[1].call_counter); |
| 842 | resp->encrypted.call_id[2] = htonl(conn->channels[2].call_counter); |
| 843 | resp->encrypted.call_id[3] = htonl(conn->channels[3].call_counter); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 844 | |
| 845 | /* calculate the response checksum and then do the encryption */ |
David Howells | 8c2f826 | 2018-02-08 15:59:07 +0000 | [diff] [blame] | 846 | rxkad_calc_response_checksum(resp); |
David Howells | 1db88c5 | 2019-07-30 15:56:57 +0100 | [diff] [blame] | 847 | ret = rxkad_encrypt_response(conn, resp, token->kad); |
| 848 | if (ret == 0) |
| 849 | ret = rxkad_send_response(conn, &sp->hdr, resp, token->kad); |
David Howells | 8c2f826 | 2018-02-08 15:59:07 +0000 | [diff] [blame] | 850 | kfree(resp); |
| 851 | return ret; |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 852 | } |
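
/*
 * For reference, the CHALLENGE/RESPONSE exchange handled here and in
 * rxkad_verify_response() below looks roughly like this (a sketch inferred
 * from the code in this file, not a normative wire description):
 *
 *	server					client
 *	   CHALLENGE { version, nonce, min_level }  -->
 *	   <--  RESPONSE { version, kvno, ticket_len,
 *			   encrypted { epoch, cid, securityIndex, call_id[4],
 *				       inc_nonce = nonce + 1, level, checksum },
 *			   ticket }
 *
 * The encrypted section is enciphered with the session key carried inside
 * the Kerberos ticket; the server recovers that key by decrypting the
 * ticket with its own server key.
 */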
| 853 | |
| 854 | /* |
| 855 |  * decrypt the Kerberos IV ticket in the response
| 856 | */ |
| 857 | static int rxkad_decrypt_ticket(struct rxrpc_connection *conn, |
David Howells | ec832bd | 2020-09-16 08:00:44 +0100 | [diff] [blame] | 858 | struct key *server_key, |
David Howells | fb46f6e | 2017-04-06 10:12:00 +0100 | [diff] [blame] | 859 | struct sk_buff *skb, |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 860 | void *ticket, size_t ticket_len, |
| 861 | struct rxrpc_crypt *_session_key, |
David Howells | a00ce28 | 2022-10-20 09:56:36 +0100 | [diff] [blame] | 862 | time64_t *_expiry) |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 863 | { |
Herbert Xu | 1afe593 | 2016-01-24 21:19:01 +0800 | [diff] [blame] | 864 | struct skcipher_request *req; |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 865 | struct rxrpc_crypt iv, key; |
Herbert Xu | 68e3f5d | 2007-10-27 00:52:07 -0700 | [diff] [blame] | 866 | struct scatterlist sg[1]; |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 867 | struct in_addr addr; |
Eric Dumazet | 95c9617 | 2012-04-15 05:58:06 +0000 | [diff] [blame] | 868 | unsigned int life; |
Baolin Wang | 10674a0 | 2017-08-29 10:15:40 +0100 | [diff] [blame] | 869 | time64_t issue, now; |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 870 | bool little_endian; |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 871 | u8 *p, *q, *name, *end; |
| 872 | |
David Howells | ec832bd | 2020-09-16 08:00:44 +0100 | [diff] [blame] | 873 | _enter("{%d},{%x}", conn->debug_id, key_serial(server_key)); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 874 | |
| 875 | *_expiry = 0; |
| 876 | |
David Howells | ec832bd | 2020-09-16 08:00:44 +0100 | [diff] [blame] | 877 | ASSERT(server_key->payload.data[0] != NULL); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 878 | ASSERTCMP((unsigned long) ticket & 7UL, ==, 0); |
| 879 | |
David Howells | ec832bd | 2020-09-16 08:00:44 +0100 | [diff] [blame] | 880 | memcpy(&iv, &server_key->payload.data[2], sizeof(iv)); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 881 | |
David Howells | ec832bd | 2020-09-16 08:00:44 +0100 | [diff] [blame] | 882 | req = skcipher_request_alloc(server_key->payload.data[0], GFP_NOFS); |
David Howells | ef68622 | 2017-04-06 10:11:59 +0100 | [diff] [blame] | 883 | if (!req) |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 884 | return -ENOMEM; |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 885 | |
Herbert Xu | 68e3f5d | 2007-10-27 00:52:07 -0700 | [diff] [blame] | 886 | sg_init_one(&sg[0], ticket, ticket_len); |
Herbert Xu | 1afe593 | 2016-01-24 21:19:01 +0800 | [diff] [blame] | 887 | skcipher_request_set_callback(req, 0, NULL, NULL); |
| 888 | skcipher_request_set_crypt(req, sg, sg, ticket_len, iv.x); |
Herbert Xu | 1afe593 | 2016-01-24 21:19:01 +0800 | [diff] [blame] | 889 | crypto_skcipher_decrypt(req); |
| 890 | skcipher_request_free(req); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 891 | |
| 892 | p = ticket; |
| 893 | end = p + ticket_len; |
| 894 | |
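/* Pull the next NUL-terminated string out of the remaining ticket data,
 * requiring it to be printable and no longer than the named field size;
 * anything else aborts the connection with RXKADBADTICKET.
 */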
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 895 | #define Z(field, fieldl) \ |
| 896 | ({ \ |
| 897 | u8 *__str = p; \ |
| 898 | q = memchr(p, 0, end - p); \ |
| 899 | if (!q || q - p > field##_SZ) \ |
| 900 | return rxrpc_abort_conn( \ |
| 901 | conn, skb, RXKADBADTICKET, -EPROTO, \ |
| 902 | rxkad_abort_resp_tkt_##fieldl); \ |
| 903 | for (; p < q; p++) \ |
| 904 | if (!isprint(*p)) \ |
| 905 | return rxrpc_abort_conn( \ |
| 906 | conn, skb, RXKADBADTICKET, -EPROTO, \ |
| 907 | rxkad_abort_resp_tkt_##fieldl); \ |
| 908 | p++; \ |
| 909 | __str; \ |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 910 | }) |
| 911 | |
| 912 | /* extract the ticket flags */ |
| 913 | _debug("KIV FLAGS: %x", *p); |
| 914 | little_endian = *p & 1; |
| 915 | p++; |
| 916 | |
| 917 | /* extract the authentication name */ |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 918 | name = Z(ANAME, aname); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 919 | _debug("KIV ANAME: %s", name); |
| 920 | |
| 921 | /* extract the principal's instance */ |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 922 | name = Z(INST, inst); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 923 | _debug("KIV INST : %s", name); |
| 924 | |
| 925 | /* extract the principal's authentication domain */ |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 926 | name = Z(REALM, realm); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 927 | _debug("KIV REALM: %s", name); |
| 928 | |
| 929 | if (end - p < 4 + 8 + 4 + 2) |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 930 | return rxrpc_abort_conn(conn, skb, RXKADBADTICKET, -EPROTO, |
| 931 | rxkad_abort_resp_tkt_short); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 932 | |
| 933 | /* get the IPv4 address of the entity that requested the ticket */ |
| 934 | memcpy(&addr, p, sizeof(addr)); |
| 935 | p += 4; |
Harvey Harrison | 21454aa | 2008-10-31 00:54:56 -0700 | [diff] [blame] | 936 | _debug("KIV ADDR : %pI4", &addr); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 937 | |
| 938 | /* get the session key from the ticket */ |
| 939 | memcpy(&key, p, sizeof(key)); |
| 940 | p += 8; |
| 941 | _debug("KIV KEY : %08x %08x", ntohl(key.n[0]), ntohl(key.n[1])); |
| 942 | memcpy(_session_key, &key, sizeof(key)); |
| 943 | |
| 944 | /* get the ticket's lifetime */ |
| 945 | life = *p++ * 5 * 60; |
| 946 | _debug("KIV LIFE : %u", life); |
| 947 | |
| 948 | /* get the issue time of the ticket */ |
| 949 | if (little_endian) { |
| 950 | __le32 stamp; |
| 951 | memcpy(&stamp, p, 4); |
Baolin Wang | 10674a0 | 2017-08-29 10:15:40 +0100 | [diff] [blame] | 952 | issue = rxrpc_u32_to_time64(le32_to_cpu(stamp)); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 953 | } else { |
| 954 | __be32 stamp; |
| 955 | memcpy(&stamp, p, 4); |
Baolin Wang | 10674a0 | 2017-08-29 10:15:40 +0100 | [diff] [blame] | 956 | issue = rxrpc_u32_to_time64(be32_to_cpu(stamp)); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 957 | } |
| 958 | p += 4; |
Baolin Wang | 10674a0 | 2017-08-29 10:15:40 +0100 | [diff] [blame] | 959 | now = ktime_get_real_seconds(); |
| 960 | _debug("KIV ISSUE: %llx [%llx]", issue, now); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 961 | |
| 962 | /* check the ticket is in date */ |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 963 | if (issue > now) |
| 964 | return rxrpc_abort_conn(conn, skb, RXKADNOAUTH, -EKEYREJECTED, |
| 965 | rxkad_abort_resp_tkt_future); |
| 966 | if (issue < now - life) |
| 967 | return rxrpc_abort_conn(conn, skb, RXKADEXPIRED, -EKEYEXPIRED, |
| 968 | rxkad_abort_resp_tkt_expired); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 969 | |
| 970 | *_expiry = issue + life; |
| 971 | |
| 972 | /* get the service name */ |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 973 | name = Z(SNAME, sname); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 974 | _debug("KIV SNAME: %s", name); |
| 975 | |
| 976 | /* get the service instance name */ |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 977 | name = Z(INST, sinst); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 978 | _debug("KIV SINST: %s", name); |
David Howells | ef68622 | 2017-04-06 10:11:59 +0100 | [diff] [blame] | 979 | return 0; |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 980 | } |
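
/*
 * For reference, the decrypted Kerberos IV ticket parsed above has this
 * layout (reconstructed from the parsing code; sizes in bytes):
 *
 *	flags		1	bit 0 set => timestamp is little-endian
 *	aname		<= ANAME_SZ	NUL-terminated, printable
 *	instance	<= INST_SZ	NUL-terminated, printable
 *	realm		<= REALM_SZ	NUL-terminated, printable
 *	address		4	IPv4 address of the ticket requester
 *	session key	8	copied out for use on this connection
 *	lifetime	1	in units of five minutes
 *	issue time	4	byte order selected by the flags bit
 *	sname		<= SNAME_SZ	NUL-terminated, printable
 *	sinst		<= INST_SZ	NUL-terminated, printable
 */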
| 981 | |
| 982 | /* |
| 983 | * decrypt the response packet |
| 984 | */ |
| 985 | static void rxkad_decrypt_response(struct rxrpc_connection *conn, |
| 986 | struct rxkad_response *resp, |
| 987 | const struct rxrpc_crypt *session_key) |
| 988 | { |
David Howells | 1db88c5 | 2019-07-30 15:56:57 +0100 | [diff] [blame] | 989 | struct skcipher_request *req = rxkad_ci_req; |
Herbert Xu | a263629 | 2016-06-26 14:55:24 -0700 | [diff] [blame] | 990 | struct scatterlist sg[1]; |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 991 | struct rxrpc_crypt iv; |
| 992 | |
| 993 | _enter(",,%08x%08x", |
| 994 | ntohl(session_key->n[0]), ntohl(session_key->n[1])); |
| 995 | |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 996 | mutex_lock(&rxkad_ci_mutex); |
Kees Cook | 69d826f | 2018-09-18 19:10:47 -0700 | [diff] [blame] | 997 | if (crypto_sync_skcipher_setkey(rxkad_ci, session_key->x, |
David Howells | 1db88c5 | 2019-07-30 15:56:57 +0100 | [diff] [blame] | 998 | sizeof(*session_key)) < 0) |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 999 | BUG(); |
| 1000 | |
| 1001 | memcpy(&iv, session_key, sizeof(iv)); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1002 | |
Herbert Xu | a263629 | 2016-06-26 14:55:24 -0700 | [diff] [blame] | 1003 | sg_init_table(sg, 1); |
| 1004 | sg_set_buf(sg, &resp->encrypted, sizeof(resp->encrypted)); |
Kees Cook | 69d826f | 2018-09-18 19:10:47 -0700 | [diff] [blame] | 1005 | skcipher_request_set_sync_tfm(req, rxkad_ci); |
Herbert Xu | 1afe593 | 2016-01-24 21:19:01 +0800 | [diff] [blame] | 1006 | skcipher_request_set_callback(req, 0, NULL, NULL); |
| 1007 | skcipher_request_set_crypt(req, sg, sg, sizeof(resp->encrypted), iv.x); |
Herbert Xu | 1afe593 | 2016-01-24 21:19:01 +0800 | [diff] [blame] | 1008 | crypto_skcipher_decrypt(req); |
| 1009 | skcipher_request_zero(req); |
| 1010 | |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1011 | mutex_unlock(&rxkad_ci_mutex); |
| 1012 | |
| 1013 | _leave(""); |
| 1014 | } |
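
/*
 * Note that rxkad_decrypt_response() serialises on rxkad_ci_mutex and rekeys
 * the shared, preallocated rxkad_ci cipher with the session key for each
 * RESPONSE it decrypts, so no cipher allocation is needed on this path.
 */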
| 1015 | |
| 1016 | /* |
| 1017 | * verify a response |
| 1018 | */ |
| 1019 | static int rxkad_verify_response(struct rxrpc_connection *conn, |
David Howells | a00ce28 | 2022-10-20 09:56:36 +0100 | [diff] [blame] | 1020 | struct sk_buff *skb) |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1021 | { |
David Howells | 8c2f826 | 2018-02-08 15:59:07 +0000 | [diff] [blame] | 1022 | struct rxkad_response *response; |
David Howells | 248f219 | 2016-09-08 11:10:12 +0100 | [diff] [blame] | 1023 | struct rxrpc_skb_priv *sp = rxrpc_skb(skb); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1024 | struct rxrpc_crypt session_key; |
David Howells | ec832bd | 2020-09-16 08:00:44 +0100 | [diff] [blame] | 1025 | struct key *server_key; |
Baolin Wang | 10674a0 | 2017-08-29 10:15:40 +0100 | [diff] [blame] | 1026 | time64_t expiry; |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1027 | void *ticket; |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 1028 | u32 version, kvno, ticket_len, level; |
Al Viro | 91e916c | 2008-03-29 03:08:38 +0000 | [diff] [blame] | 1029 | __be32 csum; |
David Howells | a1399f8 | 2016-06-27 14:39:44 +0100 | [diff] [blame] | 1030 | int ret, i; |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1031 | |
David Howells | ec832bd | 2020-09-16 08:00:44 +0100 | [diff] [blame] | 1032 | _enter("{%d}", conn->debug_id); |
| 1033 | |
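	/* Look up the server key for this service.  A failed lookup is
	 * reported to the peer as an rxkad abort: unknown key, expired key
	 * or otherwise rejected key.
	 */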
| 1034 | server_key = rxrpc_look_up_server_security(conn, skb, 0, 0); |
| 1035 | if (IS_ERR(server_key)) { |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 1036 | ret = PTR_ERR(server_key); |
| 1037 | switch (ret) { |
David Howells | ec832bd | 2020-09-16 08:00:44 +0100 | [diff] [blame] | 1038 | case -ENOKEY: |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 1039 | return rxrpc_abort_conn(conn, skb, RXKADUNKNOWNKEY, ret, |
| 1040 | rxkad_abort_resp_nokey); |
David Howells | ec832bd | 2020-09-16 08:00:44 +0100 | [diff] [blame] | 1041 | case -EKEYEXPIRED: |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 1042 | return rxrpc_abort_conn(conn, skb, RXKADEXPIRED, ret, |
| 1043 | rxkad_abort_resp_key_expired); |
David Howells | ec832bd | 2020-09-16 08:00:44 +0100 | [diff] [blame] | 1044 | default: |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 1045 | return rxrpc_abort_conn(conn, skb, RXKADNOAUTH, ret, |
| 1046 | rxkad_abort_resp_key_rejected); |
David Howells | ec832bd | 2020-09-16 08:00:44 +0100 | [diff] [blame] | 1047 | } |
David Howells | ec832bd | 2020-09-16 08:00:44 +0100 | [diff] [blame] | 1048 | } |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1049 | |
David Howells | 8c2f826 | 2018-02-08 15:59:07 +0000 | [diff] [blame] | 1050 | ret = -ENOMEM; |
| 1051 | response = kzalloc(sizeof(struct rxkad_response), GFP_NOFS); |
| 1052 | if (!response) |
| 1053 | goto temporary_error; |
| 1054 | |
David Howells | 775e5b7 | 2016-09-30 13:26:03 +0100 | [diff] [blame] | 1055 | if (skb_copy_bits(skb, sizeof(struct rxrpc_wire_header), |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 1056 | response, sizeof(*response)) < 0) { |
| 1057 | rxrpc_abort_conn(conn, skb, RXKADPACKETSHORT, -EPROTO, |
| 1058 | rxkad_abort_resp_short); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1059 | goto protocol_error; |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 1060 | } |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1061 | |
David Howells | 8c2f826 | 2018-02-08 15:59:07 +0000 | [diff] [blame] | 1062 | version = ntohl(response->version); |
| 1063 | ticket_len = ntohl(response->ticket_len); |
| 1064 | kvno = ntohl(response->kvno); |
David Howells | 2ebdb26 | 2022-10-20 11:51:06 +0100 | [diff] [blame] | 1065 | |
| 1066 | trace_rxrpc_rx_response(conn, sp->hdr.serial, version, kvno, ticket_len); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1067 | |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 1068 | if (version != RXKAD_VERSION) { |
| 1069 | rxrpc_abort_conn(conn, skb, RXKADINCONSISTENCY, -EPROTO, |
| 1070 | rxkad_abort_resp_version); |
David Howells | 4aa9cb3 | 2007-12-07 04:31:47 -0800 | [diff] [blame] | 1071 | goto protocol_error; |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 1072 | } |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1073 | |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 1074 | if (ticket_len < 4 || ticket_len > MAXKRB5TICKETLEN) { |
| 1075 | rxrpc_abort_conn(conn, skb, RXKADTICKETLEN, -EPROTO, |
| 1076 | rxkad_abort_resp_tkt_len); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1077 | goto protocol_error; |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 1078 | } |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1079 | |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 1080 | if (kvno >= RXKAD_TKT_TYPE_KERBEROS_V5) { |
| 1081 | rxrpc_abort_conn(conn, skb, RXKADUNKNOWNKEY, -EPROTO, |
| 1082 | rxkad_abort_resp_unknown_tkt); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1083 | goto protocol_error; |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 1084 | } |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1085 | |
| 1086 | 	/* extract the Kerberos ticket, then decrypt and decode it */
David Howells | ef68622 | 2017-04-06 10:11:59 +0100 | [diff] [blame] | 1087 | ret = -ENOMEM; |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1088 | ticket = kmalloc(ticket_len, GFP_NOFS); |
| 1089 | if (!ticket) |
Dinghao Liu | b43c75a | 2020-08-27 16:55:46 +0100 | [diff] [blame] | 1090 | goto temporary_error_free_resp; |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1091 | |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 1092 | if (skb_copy_bits(skb, sizeof(struct rxrpc_wire_header) + sizeof(*response), |
| 1093 | ticket, ticket_len) < 0) { |
| 1094 | rxrpc_abort_conn(conn, skb, RXKADPACKETSHORT, -EPROTO, |
| 1095 | rxkad_abort_resp_short_tkt); |
| 1096 | 		goto protocol_error_free;
| 1097 | } |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1098 | |
David Howells | ec832bd | 2020-09-16 08:00:44 +0100 | [diff] [blame] | 1099 | ret = rxkad_decrypt_ticket(conn, server_key, skb, ticket, ticket_len, |
David Howells | a00ce28 | 2022-10-20 09:56:36 +0100 | [diff] [blame] | 1100 | &session_key, &expiry); |
David Howells | ef68622 | 2017-04-06 10:11:59 +0100 | [diff] [blame] | 1101 | if (ret < 0) |
Qiushi Wu | f45d01f | 2020-05-22 13:45:18 -0500 | [diff] [blame] | 1102 | goto temporary_error_free_ticket; |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1103 | |
| 1104 | /* use the session key from inside the ticket to decrypt the |
| 1105 | * response */ |
David Howells | 8c2f826 | 2018-02-08 15:59:07 +0000 | [diff] [blame] | 1106 | rxkad_decrypt_response(conn, response, &session_key); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1107 | |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 1108 | if (ntohl(response->encrypted.epoch) != conn->proto.epoch || |
| 1109 | ntohl(response->encrypted.cid) != conn->proto.cid || |
| 1110 | ntohl(response->encrypted.securityIndex) != conn->security_ix) { |
| 1111 | rxrpc_abort_conn(conn, skb, RXKADSEALEDINCON, -EPROTO, |
| 1112 | rxkad_abort_resp_bad_param); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1113 | goto protocol_error_free; |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 1114 | } |
| 1115 | |
David Howells | 8c2f826 | 2018-02-08 15:59:07 +0000 | [diff] [blame] | 1116 | csum = response->encrypted.checksum; |
| 1117 | response->encrypted.checksum = 0; |
| 1118 | rxkad_calc_response_checksum(response); |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 1119 | if (response->encrypted.checksum != csum) { |
| 1120 | rxrpc_abort_conn(conn, skb, RXKADSEALEDINCON, -EPROTO, |
| 1121 | rxkad_abort_resp_bad_checksum); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1122 | goto protocol_error_free; |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 1123 | } |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1124 | |
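	/* Validate the call ID counters in the response: each channel's call
	 * ID must fit in an int, must not run backwards with respect to the
	 * counter we already hold, and may only move it forwards if no call
	 * is currently attached to that channel.
	 */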
David Howells | a1399f8 | 2016-06-27 14:39:44 +0100 | [diff] [blame] | 1125 | for (i = 0; i < RXRPC_MAXCALLS; i++) { |
David Howells | 8c2f826 | 2018-02-08 15:59:07 +0000 | [diff] [blame] | 1126 | u32 call_id = ntohl(response->encrypted.call_id[i]); |
David Howells | 9d35d88 | 2022-10-19 09:45:43 +0100 | [diff] [blame] | 1127 | u32 counter = READ_ONCE(conn->channels[i].call_counter); |
David Howells | a1399f8 | 2016-06-27 14:39:44 +0100 | [diff] [blame] | 1128 | |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 1129 | if (call_id > INT_MAX) { |
| 1130 | rxrpc_abort_conn(conn, skb, RXKADSEALEDINCON, -EPROTO, |
| 1131 | rxkad_abort_resp_bad_callid); |
David Howells | 9d35d88 | 2022-10-19 09:45:43 +0100 | [diff] [blame] | 1132 | goto protocol_error_free; |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 1133 | } |
David Howells | a1399f8 | 2016-06-27 14:39:44 +0100 | [diff] [blame] | 1134 | |
David Howells | 9d35d88 | 2022-10-19 09:45:43 +0100 | [diff] [blame] | 1135 | if (call_id < counter) { |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 1136 | rxrpc_abort_conn(conn, skb, RXKADSEALEDINCON, -EPROTO, |
| 1137 | rxkad_abort_resp_call_ctr); |
David Howells | 9d35d88 | 2022-10-19 09:45:43 +0100 | [diff] [blame] | 1138 | goto protocol_error_free; |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 1139 | } |
David Howells | fb46f6e | 2017-04-06 10:12:00 +0100 | [diff] [blame] | 1140 | |
David Howells | 9d35d88 | 2022-10-19 09:45:43 +0100 | [diff] [blame] | 1141 | if (call_id > counter) { |
| 1142 | if (conn->channels[i].call) { |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 1143 | rxrpc_abort_conn(conn, skb, RXKADSEALEDINCON, -EPROTO, |
| 1144 | rxkad_abort_resp_call_state); |
David Howells | 9d35d88 | 2022-10-19 09:45:43 +0100 | [diff] [blame] | 1145 | goto protocol_error_free; |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 1146 | } |
David Howells | a1399f8 | 2016-06-27 14:39:44 +0100 | [diff] [blame] | 1147 | conn->channels[i].call_counter = call_id; |
| 1148 | } |
| 1149 | } |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1150 | |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 1151 | if (ntohl(response->encrypted.inc_nonce) != conn->rxkad.nonce + 1) { |
| 1152 | rxrpc_abort_conn(conn, skb, RXKADOUTOFSEQUENCE, -EPROTO, |
| 1153 | rxkad_abort_resp_ooseq); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1154 | goto protocol_error_free; |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 1155 | } |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1156 | |
David Howells | 8c2f826 | 2018-02-08 15:59:07 +0000 | [diff] [blame] | 1157 | level = ntohl(response->encrypted.level); |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 1158 | if (level > RXRPC_SECURITY_ENCRYPT) { |
| 1159 | rxrpc_abort_conn(conn, skb, RXKADLEVELFAIL, -EPROTO, |
| 1160 | rxkad_abort_resp_level); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1161 | goto protocol_error_free; |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 1162 | } |
David Howells | 2cc8008 | 2022-10-19 13:49:02 +0100 | [diff] [blame] | 1163 | conn->security_level = level; |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1164 | |
| 1165 | /* create a key to hold the security data and expiration time - after |
| 1166 | * this the connection security can be handled in exactly the same way |
| 1167 | * as for a client connection */ |
| 1168 | ret = rxrpc_get_server_data_key(conn, &session_key, expiry, kvno); |
David Howells | ef68622 | 2017-04-06 10:11:59 +0100 | [diff] [blame] | 1169 | if (ret < 0) |
David Howells | 8c2f826 | 2018-02-08 15:59:07 +0000 | [diff] [blame] | 1170 | goto temporary_error_free_ticket; |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1171 | |
| 1172 | kfree(ticket); |
David Howells | 8c2f826 | 2018-02-08 15:59:07 +0000 | [diff] [blame] | 1173 | kfree(response); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1174 | _leave(" = 0"); |
| 1175 | return 0; |
| 1176 | |
| 1177 | protocol_error_free: |
| 1178 | kfree(ticket); |
| 1179 | protocol_error: |
David Howells | 8c2f826 | 2018-02-08 15:59:07 +0000 | [diff] [blame] | 1180 | kfree(response); |
David Howells | ec832bd | 2020-09-16 08:00:44 +0100 | [diff] [blame] | 1181 | key_put(server_key); |
David Howells | 57af281 | 2022-10-06 21:45:42 +0100 | [diff] [blame] | 1182 | return -EPROTO; |
David Howells | ef68622 | 2017-04-06 10:11:59 +0100 | [diff] [blame] | 1183 | |
David Howells | 8c2f826 | 2018-02-08 15:59:07 +0000 | [diff] [blame] | 1184 | temporary_error_free_ticket: |
David Howells | ef68622 | 2017-04-06 10:11:59 +0100 | [diff] [blame] | 1185 | kfree(ticket); |
Dinghao Liu | b43c75a | 2020-08-27 16:55:46 +0100 | [diff] [blame] | 1186 | temporary_error_free_resp: |
David Howells | 8c2f826 | 2018-02-08 15:59:07 +0000 | [diff] [blame] | 1187 | kfree(response); |
David Howells | ef68622 | 2017-04-06 10:11:59 +0100 | [diff] [blame] | 1188 | temporary_error: |
| 1189 | /* Ignore the response packet if we got a temporary error such as |
| 1190 | * ENOMEM. We just want to send the challenge again. Note that we |
| 1191 | * also come out this way if the ticket decryption fails. |
| 1192 | */ |
David Howells | ec832bd | 2020-09-16 08:00:44 +0100 | [diff] [blame] | 1193 | key_put(server_key); |
David Howells | ef68622 | 2017-04-06 10:11:59 +0100 | [diff] [blame] | 1194 | return ret; |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1195 | } |
| 1196 | |
| 1197 | /* |
| 1198 | * clear the connection security |
| 1199 | */ |
| 1200 | static void rxkad_clear(struct rxrpc_connection *conn) |
| 1201 | { |
| 1202 | _enter(""); |
| 1203 | |
David Howells | 521bb30 | 2020-09-22 13:36:17 +0100 | [diff] [blame] | 1204 | if (conn->rxkad.cipher) |
| 1205 | crypto_free_sync_skcipher(conn->rxkad.cipher); |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1206 | } |
| 1207 | |
| 1208 | /* |
David Howells | 648af7f | 2016-04-07 17:23:51 +0100 | [diff] [blame] | 1209 | * Initialise the rxkad security service. |
| 1210 | */ |
| 1211 | static int rxkad_init(void) |
| 1212 | { |
David Howells | 1db88c5 | 2019-07-30 15:56:57 +0100 | [diff] [blame] | 1213 | struct crypto_sync_skcipher *tfm; |
| 1214 | struct skcipher_request *req; |
| 1215 | |
David Howells | 648af7f | 2016-04-07 17:23:51 +0100 | [diff] [blame] | 1216 | /* pin the cipher we need so that the crypto layer doesn't invoke |
| 1217 | * keventd to go get it */ |
David Howells | 1db88c5 | 2019-07-30 15:56:57 +0100 | [diff] [blame] | 1218 | tfm = crypto_alloc_sync_skcipher("pcbc(fcrypt)", 0, 0); |
| 1219 | if (IS_ERR(tfm)) |
| 1220 | return PTR_ERR(tfm); |
| 1221 | |
| 1222 | req = skcipher_request_alloc(&tfm->base, GFP_KERNEL); |
| 1223 | if (!req) |
| 1224 | goto nomem_tfm; |
| 1225 | |
| 1226 | rxkad_ci_req = req; |
| 1227 | rxkad_ci = tfm; |
| 1228 | return 0; |
| 1229 | |
| 1230 | nomem_tfm: |
| 1231 | crypto_free_sync_skcipher(tfm); |
| 1232 | return -ENOMEM; |
David Howells | 648af7f | 2016-04-07 17:23:51 +0100 | [diff] [blame] | 1233 | } |
| 1234 | |
| 1235 | /* |
| 1236 | * Clean up the rxkad security service. |
| 1237 | */ |
| 1238 | static void rxkad_exit(void) |
| 1239 | { |
David Howells | 1db88c5 | 2019-07-30 15:56:57 +0100 | [diff] [blame] | 1240 | crypto_free_sync_skcipher(rxkad_ci); |
| 1241 | skcipher_request_free(rxkad_ci_req); |
David Howells | 648af7f | 2016-04-07 17:23:51 +0100 | [diff] [blame] | 1242 | } |
| 1243 | |
| 1244 | /* |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1245 | * RxRPC Kerberos-based security |
| 1246 | */ |
David Howells | 648af7f | 2016-04-07 17:23:51 +0100 | [diff] [blame] | 1247 | const struct rxrpc_security rxkad = { |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1248 | .name = "rxkad", |
David Howells | 8b81547 | 2009-09-14 01:17:30 +0000 | [diff] [blame] | 1249 | .security_index = RXRPC_SECURITY_RXKAD, |
David Howells | 063c60d | 2019-12-20 16:17:16 +0000 | [diff] [blame] | 1250 | .no_key_abort = RXKADUNKNOWNKEY, |
David Howells | 648af7f | 2016-04-07 17:23:51 +0100 | [diff] [blame] | 1251 | .init = rxkad_init, |
| 1252 | .exit = rxkad_exit, |
David Howells | 12da59f | 2020-09-16 08:37:29 +0100 | [diff] [blame] | 1253 | .preparse_server_key = rxkad_preparse_server_key, |
| 1254 | .free_preparse_server_key = rxkad_free_preparse_server_key, |
| 1255 | .destroy_server_key = rxkad_destroy_server_key, |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1256 | .init_connection_security = rxkad_init_connection_security, |
David Howells | d7d775b | 2020-09-16 01:34:39 +0100 | [diff] [blame] | 1257 | .how_much_data = rxkad_how_much_data, |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1258 | .secure_packet = rxkad_secure_packet, |
| 1259 | .verify_packet = rxkad_verify_packet, |
David Howells | 1db88c5 | 2019-07-30 15:56:57 +0100 | [diff] [blame] | 1260 | .free_call_crypto = rxkad_free_call_crypto, |
David Howells | 17926a7 | 2007-04-26 15:48:28 -0700 | [diff] [blame] | 1261 | .issue_challenge = rxkad_issue_challenge, |
| 1262 | .respond_to_challenge = rxkad_respond_to_challenge, |
| 1263 | .verify_response = rxkad_verify_response, |
| 1264 | .clear = rxkad_clear, |
| 1265 | }; |