// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2010-2014, The Linux Foundation. All rights reserved.
 */

#include <linux/device.h>
#include <linux/interrupt.h>
#include <linux/moduleparam.h>
#include <linux/types.h>
#include <crypto/aes.h>
#include <crypto/internal/des.h>
#include <crypto/internal/skcipher.h>

#include "cipher.h"

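/*
 * AES requests of at most this size are punted to the software
 * fallback, since small requests are not worth the hardware's DMA
 * setup cost. Tunable at runtime through sysfs, or at load time,
 * e.g. (assuming the driver is built as a module named qcrypto):
 *
 *	modprobe qcrypto aes_sw_max_len=512
 */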
static unsigned int aes_sw_max_len = CONFIG_CRYPTO_DEV_QCE_SW_MAX_LEN;
module_param(aes_sw_max_len, uint, 0644);
MODULE_PARM_DESC(aes_sw_max_len,
		 "Only use hardware for AES requests larger than this "
		 "[0=always use hardware; anything <16 breaks AES-GCM; default="
		 __stringify(CONFIG_CRYPTO_DEV_QCE_SW_MAX_LEN)"]");

static LIST_HEAD(skcipher_algs);

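/*
 * DMA completion callback: shut down the DMA channel, unmap the
 * scatterlists, free the destination table, and report the engine
 * status back to the crypto API.
 */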
static void qce_skcipher_done(void *data)
{
	struct crypto_async_request *async_req = data;
	struct skcipher_request *req = skcipher_request_cast(async_req);
	struct qce_cipher_reqctx *rctx = skcipher_request_ctx(req);
	struct qce_alg_template *tmpl = to_cipher_tmpl(crypto_skcipher_reqtfm(req));
	struct qce_device *qce = tmpl->qce;
	struct qce_result_dump *result_buf = qce->dma.result_buf;
	enum dma_data_direction dir_src, dir_dst;
	u32 status;
	int error;
	bool diff_dst;

	diff_dst = (req->src != req->dst);
	dir_src = diff_dst ? DMA_TO_DEVICE : DMA_BIDIRECTIONAL;
	dir_dst = diff_dst ? DMA_FROM_DEVICE : DMA_BIDIRECTIONAL;

	error = qce_dma_terminate_all(&qce->dma);
	if (error)
		dev_dbg(qce->dev, "skcipher dma termination error (%d)\n",
			error);

	if (diff_dst)
		dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src);
	dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst);

	sg_free_table(&rctx->dst_tbl);

	error = qce_check_status(qce, &status);
	if (error < 0)
		dev_dbg(qce->dev, "skcipher operation error (%x)\n", status);

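	/*
	 * The engine dumps the updated IV/counter into the result buffer;
	 * copy it back so that chained requests continue from it.
	 */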
	memcpy(rctx->iv, result_buf->encr_cntr_iv, rctx->ivsize);
	qce->async_req_done(tmpl->qce, error);
}

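/*
 * Prepare a request for the engine: count and map the source and
 * destination scatterlists, append the result dump buffer to the
 * destination list, then program the DMA channel and kick off the
 * crypto engine.
 */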
static int
qce_skcipher_async_req_handle(struct crypto_async_request *async_req)
{
	struct skcipher_request *req = skcipher_request_cast(async_req);
	struct qce_cipher_reqctx *rctx = skcipher_request_ctx(req);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
	struct qce_alg_template *tmpl = to_cipher_tmpl(crypto_skcipher_reqtfm(req));
	struct qce_device *qce = tmpl->qce;
	enum dma_data_direction dir_src, dir_dst;
	struct scatterlist *sg;
	bool diff_dst;
	gfp_t gfp;
	int ret;

	rctx->iv = req->iv;
	rctx->ivsize = crypto_skcipher_ivsize(skcipher);
	rctx->cryptlen = req->cryptlen;

	diff_dst = (req->src != req->dst);
	dir_src = diff_dst ? DMA_TO_DEVICE : DMA_BIDIRECTIONAL;
	dir_dst = diff_dst ? DMA_FROM_DEVICE : DMA_BIDIRECTIONAL;

	rctx->src_nents = sg_nents_for_len(req->src, req->cryptlen);
	if (diff_dst)
		rctx->dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
	else
		rctx->dst_nents = rctx->src_nents;
	if (rctx->src_nents < 0) {
		dev_err(qce->dev, "Invalid number of src SG entries\n");
		return rctx->src_nents;
	}
	if (rctx->dst_nents < 0) {
		dev_err(qce->dev, "Invalid number of dst SG entries\n");
		return rctx->dst_nents;
	}

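	/* one extra entry for the result dump buffer appended below */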
	rctx->dst_nents += 1;

	gfp = (req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
						GFP_KERNEL : GFP_ATOMIC;

	ret = sg_alloc_table(&rctx->dst_tbl, rctx->dst_nents, gfp);
	if (ret)
		return ret;

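	/*
	 * Build the destination table: the caller's dst scatterlist
	 * followed by the result dump buffer, which the engine fills in
	 * at the end of the operation.
	 */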
	sg_init_one(&rctx->result_sg, qce->dma.result_buf, QCE_RESULT_BUF_SZ);

	sg = qce_sgtable_add(&rctx->dst_tbl, req->dst, req->cryptlen);
	if (IS_ERR(sg)) {
		ret = PTR_ERR(sg);
		goto error_free;
	}

	sg = qce_sgtable_add(&rctx->dst_tbl, &rctx->result_sg,
			     QCE_RESULT_BUF_SZ);
	if (IS_ERR(sg)) {
		ret = PTR_ERR(sg);
		goto error_free;
	}

	sg_mark_end(sg);
	rctx->dst_sg = rctx->dst_tbl.sgl;

	ret = dma_map_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst);
	if (ret < 0)
		goto error_free;

	if (diff_dst) {
		ret = dma_map_sg(qce->dev, req->src, rctx->src_nents, dir_src);
		if (ret < 0)
			goto error_unmap_dst;
		rctx->src_sg = req->src;
	} else {
		rctx->src_sg = rctx->dst_sg;
	}

	ret = qce_dma_prep_sgs(&qce->dma, rctx->src_sg, rctx->src_nents,
			       rctx->dst_sg, rctx->dst_nents,
			       qce_skcipher_done, async_req);
	if (ret)
		goto error_unmap_src;

	qce_dma_issue_pending(&qce->dma);

	ret = qce_start(async_req, tmpl->crypto_alg_type, req->cryptlen, 0);
	if (ret)
		goto error_terminate;

	return 0;

error_terminate:
	qce_dma_terminate_all(&qce->dma);
error_unmap_src:
	if (diff_dst)
		dma_unmap_sg(qce->dev, req->src, rctx->src_nents, dir_src);
error_unmap_dst:
	dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst);
error_free:
	sg_free_table(&rctx->dst_tbl);
	return ret;
}

static int qce_skcipher_setkey(struct crypto_skcipher *ablk, const u8 *key,
			       unsigned int keylen)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ablk);
	struct qce_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	unsigned long flags = to_cipher_tmpl(ablk)->alg_flags;
	int ret;

	if (!key || !keylen)
		return -EINVAL;

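	/*
	 * Only AES-128 and AES-256 keys are kept for the hardware; an
	 * XTS key is two concatenated AES keys, hence the halving.
	 * Other sizes (e.g. AES-192) are served entirely by the fallback.
	 */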
	switch (IS_XTS(flags) ? keylen >> 1 : keylen) {
	case AES_KEYSIZE_128:
	case AES_KEYSIZE_256:
		memcpy(ctx->enc_key, key, keylen);
		break;
	}

	ret = crypto_skcipher_setkey(ctx->fallback, key, keylen);
	if (!ret)
		ctx->enc_keylen = keylen;
	return ret;
}

static int qce_des_setkey(struct crypto_skcipher *ablk, const u8 *key,
			  unsigned int keylen)
{
	struct qce_cipher_ctx *ctx = crypto_skcipher_ctx(ablk);
	int err;

	err = verify_skcipher_des_key(ablk, key);
	if (err)
		return err;

	ctx->enc_keylen = keylen;
	memcpy(ctx->enc_key, key, keylen);
	return 0;
}

static int qce_des3_setkey(struct crypto_skcipher *ablk, const u8 *key,
			   unsigned int keylen)
{
	struct qce_cipher_ctx *ctx = crypto_skcipher_ctx(ablk);
	int err;

	err = verify_skcipher_des3_key(ablk, key);
	if (err)
		return err;

	ctx->enc_keylen = keylen;
	memcpy(ctx->enc_key, key, keylen);
	return 0;
}

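/*
 * Common encrypt/decrypt path: decide whether the request can run on
 * the engine or must take the software fallback, then either hand it
 * to the fallback skcipher or enqueue it on the qce engine.
 */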
static int qce_skcipher_crypt(struct skcipher_request *req, int encrypt)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct qce_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct qce_cipher_reqctx *rctx = skcipher_request_ctx(req);
	struct qce_alg_template *tmpl = to_cipher_tmpl(tfm);
	int keylen;
	int ret;

	rctx->flags = tmpl->alg_flags;
	rctx->flags |= encrypt ? QCE_ENCRYPT : QCE_DECRYPT;
	keylen = IS_XTS(rctx->flags) ? ctx->enc_keylen >> 1 : ctx->enc_keylen;

	/*
	 * Use the fallback for AES key sizes the hardware is not given
	 * (only 128- and 256-bit keys are programmed) and for requests
	 * too short to be worth the DMA setup cost. The engine also
	 * hangs on AES-XTS requests longer than QCE_SECTOR_SIZE but not
	 * a multiple of it, so pass those to the fallback as well.
	 */
	if (IS_AES(rctx->flags) &&
	    (((keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_256) ||
	      req->cryptlen <= aes_sw_max_len) ||
	     (IS_XTS(rctx->flags) && req->cryptlen > QCE_SECTOR_SIZE &&
	      req->cryptlen % QCE_SECTOR_SIZE))) {
		skcipher_request_set_tfm(&rctx->fallback_req, ctx->fallback);
		skcipher_request_set_callback(&rctx->fallback_req,
					      req->base.flags,
					      req->base.complete,
					      req->base.data);
		skcipher_request_set_crypt(&rctx->fallback_req, req->src,
					   req->dst, req->cryptlen, req->iv);
		ret = encrypt ? crypto_skcipher_encrypt(&rctx->fallback_req) :
				crypto_skcipher_decrypt(&rctx->fallback_req);
		return ret;
	}

	return tmpl->qce->async_req_enqueue(tmpl->qce, &req->base);
}

static int qce_skcipher_encrypt(struct skcipher_request *req)
{
	return qce_skcipher_crypt(req, 1);
}

static int qce_skcipher_decrypt(struct skcipher_request *req)
{
	return qce_skcipher_crypt(req, 0);
}

static int qce_skcipher_init(struct crypto_skcipher *tfm)
{
	/* take the size without the fallback skcipher_request at the end */
	crypto_skcipher_set_reqsize(tfm, offsetof(struct qce_cipher_reqctx,
						  fallback_req));
	return 0;
}

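/*
 * AES transforms allocate a software fallback of the same algorithm
 * (hence CRYPTO_ALG_NEED_FALLBACK) and reserve room for the fallback
 * request at the end of the request context.
 */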
static int qce_skcipher_init_fallback(struct crypto_skcipher *tfm)
{
	struct qce_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	ctx->fallback = crypto_alloc_skcipher(crypto_tfm_alg_name(&tfm->base),
					      0, CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fallback))
		return PTR_ERR(ctx->fallback);

	crypto_skcipher_set_reqsize(tfm, sizeof(struct qce_cipher_reqctx) +
					 crypto_skcipher_reqsize(ctx->fallback));
	return 0;
}

static void qce_skcipher_exit(struct crypto_skcipher *tfm)
{
	struct qce_cipher_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_skcipher(ctx->fallback);
}

struct qce_skcipher_def {
	unsigned long flags;
	const char *name;
	const char *drv_name;
	unsigned int blocksize;
	unsigned int chunksize;
	unsigned int ivsize;
	unsigned int min_keysize;
	unsigned int max_keysize;
};

static const struct qce_skcipher_def skcipher_def[] = {
	{
		.flags = QCE_ALG_AES | QCE_MODE_ECB,
		.name = "ecb(aes)",
		.drv_name = "ecb-aes-qce",
		.blocksize = AES_BLOCK_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
	},
	{
		.flags = QCE_ALG_AES | QCE_MODE_CBC,
		.name = "cbc(aes)",
		.drv_name = "cbc-aes-qce",
		.blocksize = AES_BLOCK_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
	},
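	/*
	 * CTR turns AES into a stream cipher: blocksize is 1 and the
	 * underlying block granularity is advertised via chunksize.
	 */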
	{
		.flags = QCE_ALG_AES | QCE_MODE_CTR,
		.name = "ctr(aes)",
		.drv_name = "ctr-aes-qce",
		.blocksize = 1,
		.chunksize = AES_BLOCK_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
	},
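	/* an XTS key is two AES keys concatenated, hence the doubled sizes */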
	{
		.flags = QCE_ALG_AES | QCE_MODE_XTS,
		.name = "xts(aes)",
		.drv_name = "xts-aes-qce",
		.blocksize = AES_BLOCK_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.min_keysize = AES_MIN_KEY_SIZE * 2,
		.max_keysize = AES_MAX_KEY_SIZE * 2,
	},
	{
		.flags = QCE_ALG_DES | QCE_MODE_ECB,
		.name = "ecb(des)",
		.drv_name = "ecb-des-qce",
		.blocksize = DES_BLOCK_SIZE,
		.ivsize = 0,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
	},
	{
		.flags = QCE_ALG_DES | QCE_MODE_CBC,
		.name = "cbc(des)",
		.drv_name = "cbc-des-qce",
		.blocksize = DES_BLOCK_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
	},
	{
		.flags = QCE_ALG_3DES | QCE_MODE_ECB,
		.name = "ecb(des3_ede)",
		.drv_name = "ecb-3des-qce",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.ivsize = 0,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
	},
	{
		.flags = QCE_ALG_3DES | QCE_MODE_CBC,
		.name = "cbc(des3_ede)",
		.drv_name = "cbc-3des-qce",
		.blocksize = DES3_EDE_BLOCK_SIZE,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
	},
};

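/*
 * Instantiate one entry of the table above: fill in a skcipher_alg,
 * pick the matching setkey and init/exit callbacks, and register it
 * with the crypto API.
 */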
static int qce_skcipher_register_one(const struct qce_skcipher_def *def,
				     struct qce_device *qce)
{
	struct qce_alg_template *tmpl;
	struct skcipher_alg *alg;
	int ret;

	tmpl = kzalloc(sizeof(*tmpl), GFP_KERNEL);
	if (!tmpl)
		return -ENOMEM;

	alg = &tmpl->alg.skcipher;

	snprintf(alg->base.cra_name, CRYPTO_MAX_ALG_NAME, "%s", def->name);
	snprintf(alg->base.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s",
		 def->drv_name);

	alg->base.cra_blocksize = def->blocksize;
	alg->chunksize = def->chunksize;
	alg->ivsize = def->ivsize;
	alg->min_keysize = def->min_keysize;
	alg->max_keysize = def->max_keysize;
	alg->setkey = IS_3DES(def->flags) ? qce_des3_setkey :
		      IS_DES(def->flags) ? qce_des_setkey :
		      qce_skcipher_setkey;
	alg->encrypt = qce_skcipher_encrypt;
	alg->decrypt = qce_skcipher_decrypt;

	alg->base.cra_priority = 300;
	alg->base.cra_flags = CRYPTO_ALG_ASYNC |
			      CRYPTO_ALG_ALLOCATES_MEMORY |
			      CRYPTO_ALG_KERN_DRIVER_ONLY;
	alg->base.cra_ctxsize = sizeof(struct qce_cipher_ctx);
	alg->base.cra_alignmask = 0;
	alg->base.cra_module = THIS_MODULE;

	if (IS_AES(def->flags)) {
		alg->base.cra_flags |= CRYPTO_ALG_NEED_FALLBACK;
		alg->init = qce_skcipher_init_fallback;
		alg->exit = qce_skcipher_exit;
	} else {
		alg->init = qce_skcipher_init;
	}

	INIT_LIST_HEAD(&tmpl->entry);
	tmpl->crypto_alg_type = CRYPTO_ALG_TYPE_SKCIPHER;
	tmpl->alg_flags = def->flags;
	tmpl->qce = qce;

	ret = crypto_register_skcipher(alg);
	if (ret) {
		kfree(tmpl);
		dev_err(qce->dev, "%s registration failed\n", alg->base.cra_name);
		return ret;
	}

	list_add_tail(&tmpl->entry, &skcipher_algs);
	dev_dbg(qce->dev, "%s is registered\n", alg->base.cra_name);
	return 0;
}

static void qce_skcipher_unregister(struct qce_device *qce)
{
	struct qce_alg_template *tmpl, *n;

	list_for_each_entry_safe(tmpl, n, &skcipher_algs, entry) {
		crypto_unregister_skcipher(&tmpl->alg.skcipher);
		list_del(&tmpl->entry);
		kfree(tmpl);
	}
}

static int qce_skcipher_register(struct qce_device *qce)
{
	int ret, i;

	for (i = 0; i < ARRAY_SIZE(skcipher_def); i++) {
		ret = qce_skcipher_register_one(&skcipher_def[i], qce);
		if (ret)
			goto err;
	}

	return 0;
err:
	qce_skcipher_unregister(qce);
	return ret;
}

const struct qce_algo_ops skcipher_ops = {
	.type = CRYPTO_ALG_TYPE_SKCIPHER,
	.register_algs = qce_skcipher_register,
	.unregister_algs = qce_skcipher_unregister,
	.async_req_handle = qce_skcipher_async_req_handle,
};