blob: 8cdbdbe356815efbc0deb2d978a52a4a6ccc50cc [file] [log] [blame]
Antoine Tenart301422e2018-07-13 16:51:37 +02001// SPDX-License-Identifier: GPL-2.0
Antoine Ténart1b44c5a2017-05-24 16:10:34 +02002/*
3 * Copyright (C) 2017 Marvell
4 *
5 * Antoine Tenart <antoine.tenart@free-electrons.com>
Antoine Ténart1b44c5a2017-05-24 16:10:34 +02006 */
7
8#include <linux/device.h>
9#include <linux/dma-mapping.h>
10#include <linux/dmapool.h>
11
Antoine Tenartf6beaea2018-05-14 15:11:02 +020012#include <crypto/aead.h>
Antoine Ténart1b44c5a2017-05-24 16:10:34 +020013#include <crypto/aes.h>
Antoine Tenartf6beaea2018-05-14 15:11:02 +020014#include <crypto/authenc.h>
Ofer Heifetza7dea8c2018-06-28 17:21:55 +020015#include <crypto/des.h>
Antoine Tenartf6beaea2018-05-14 15:11:02 +020016#include <crypto/sha.h>
Antoine Ténart1b44c5a2017-05-24 16:10:34 +020017#include <crypto/skcipher.h>
Antoine Tenartf6beaea2018-05-14 15:11:02 +020018#include <crypto/internal/aead.h>
Ofer Heifetz1eb7b402017-12-11 12:10:55 +010019#include <crypto/internal/skcipher.h>
Antoine Ténart1b44c5a2017-05-24 16:10:34 +020020
21#include "safexcel.h"
22
23enum safexcel_cipher_direction {
24 SAFEXCEL_ENCRYPT,
25 SAFEXCEL_DECRYPT,
26};
27
Ofer Heifetza7dea8c2018-06-28 17:21:55 +020028enum safexcel_cipher_alg {
29 SAFEXCEL_DES,
Ofer Heifetz624698792018-06-28 17:21:56 +020030 SAFEXCEL_3DES,
Ofer Heifetza7dea8c2018-06-28 17:21:55 +020031 SAFEXCEL_AES,
32};
33
Antoine Ténart1b44c5a2017-05-24 16:10:34 +020034struct safexcel_cipher_ctx {
35 struct safexcel_context base;
36 struct safexcel_crypto_priv *priv;
37
Antoine Ténart1b44c5a2017-05-24 16:10:34 +020038 u32 mode;
Ofer Heifetza7dea8c2018-06-28 17:21:55 +020039 enum safexcel_cipher_alg alg;
Antoine Tenartf6beaea2018-05-14 15:11:02 +020040 bool aead;
Antoine Ténart1b44c5a2017-05-24 16:10:34 +020041
42 __le32 key[8];
43 unsigned int key_len;
Antoine Tenartf6beaea2018-05-14 15:11:02 +020044
45 /* All the below is AEAD specific */
Ofer Heifetza7dea8c2018-06-28 17:21:55 +020046 u32 hash_alg;
Antoine Tenartf6beaea2018-05-14 15:11:02 +020047 u32 state_sz;
Antoine Tenart87eee122018-05-29 14:13:48 +020048 u32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
49 u32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
Antoine Ténart1b44c5a2017-05-24 16:10:34 +020050};
51
Ofer Heifetz1eb7b402017-12-11 12:10:55 +010052struct safexcel_cipher_req {
Ofer Heifetz847ccfc2017-12-14 15:26:49 +010053 enum safexcel_cipher_direction direction;
Antoine Tenart89332592019-05-27 16:51:06 +020054 /* Number of result descriptors associated to the request */
55 unsigned int rdescs;
Ofer Heifetz1eb7b402017-12-11 12:10:55 +010056 bool needs_inv;
57};
58
Antoine Tenart8ac12832018-05-14 15:10:56 +020059static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
60 struct safexcel_command_desc *cdesc,
61 u32 length)
Antoine Ténart1b44c5a2017-05-24 16:10:34 +020062{
Antoine Ténart1b44c5a2017-05-24 16:10:34 +020063 struct safexcel_token *token;
Antoine Tenart57660b12019-05-27 16:51:03 +020064 u32 offset = 0, block_sz = 0;
Antoine Ténart1b44c5a2017-05-24 16:10:34 +020065
66 if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) {
Ofer Heifetza7dea8c2018-06-28 17:21:55 +020067 switch (ctx->alg) {
68 case SAFEXCEL_DES:
Antoine Tenart57660b12019-05-27 16:51:03 +020069 block_sz = DES_BLOCK_SIZE;
Ofer Heifetza7dea8c2018-06-28 17:21:55 +020070 cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
71 break;
Ofer Heifetz624698792018-06-28 17:21:56 +020072 case SAFEXCEL_3DES:
Antoine Tenart57660b12019-05-27 16:51:03 +020073 block_sz = DES3_EDE_BLOCK_SIZE;
Ofer Heifetz624698792018-06-28 17:21:56 +020074 cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
75 break;
Ofer Heifetza7dea8c2018-06-28 17:21:55 +020076 case SAFEXCEL_AES:
Antoine Tenart57660b12019-05-27 16:51:03 +020077 block_sz = AES_BLOCK_SIZE;
Ofer Heifetza7dea8c2018-06-28 17:21:55 +020078 cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
79 break;
80 }
Antoine Tenart57660b12019-05-27 16:51:03 +020081
82 offset = block_sz / sizeof(u32);
83 memcpy(cdesc->control_data.token, iv, block_sz);
Antoine Ténart1b44c5a2017-05-24 16:10:34 +020084 }
85
86 token = (struct safexcel_token *)(cdesc->control_data.token + offset);
87
88 token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
89 token[0].packet_length = length;
Antoine Tenart15f64ee2018-03-19 09:21:18 +010090 token[0].stat = EIP197_TOKEN_STAT_LAST_PACKET |
91 EIP197_TOKEN_STAT_LAST_HASH;
Antoine Ténart1b44c5a2017-05-24 16:10:34 +020092 token[0].instructions = EIP197_TOKEN_INS_LAST |
93 EIP197_TOKEN_INS_TYPE_CRYTO |
94 EIP197_TOKEN_INS_TYPE_OUTPUT;
Antoine Tenart57660b12019-05-27 16:51:03 +020095
96 if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) {
97 u32 last = (EIP197_MAX_TOKENS - 1) - offset;
98
99 token[last].opcode = EIP197_TOKEN_OPCODE_CTX_ACCESS;
100 token[last].packet_length = EIP197_TOKEN_DIRECTION_EXTERNAL |
101 EIP197_TOKEN_EXEC_IF_SUCCESSFUL|
102 EIP197_TOKEN_CTX_OFFSET(0x2);
103 token[last].stat = EIP197_TOKEN_STAT_LAST_HASH |
104 EIP197_TOKEN_STAT_LAST_PACKET;
105 token[last].instructions =
106 EIP197_TOKEN_INS_ORIGIN_LEN(block_sz / sizeof(u32)) |
107 EIP197_TOKEN_INS_ORIGIN_IV0;
108
109 /* Store the updated IV values back in the internal context
110 * registers.
111 */
112 cdesc->control_data.control1 |= CONTEXT_CONTROL_CRYPTO_STORE;
113 }
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200114}
115
Antoine Tenartf6beaea2018-05-14 15:11:02 +0200116static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
117 struct safexcel_command_desc *cdesc,
118 enum safexcel_cipher_direction direction,
119 u32 cryptlen, u32 assoclen, u32 digestsize)
120{
121 struct safexcel_token *token;
122 unsigned offset = 0;
123
124 if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) {
125 offset = AES_BLOCK_SIZE / sizeof(u32);
126 memcpy(cdesc->control_data.token, iv, AES_BLOCK_SIZE);
127
128 cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
129 }
130
131 token = (struct safexcel_token *)(cdesc->control_data.token + offset);
132
133 if (direction == SAFEXCEL_DECRYPT)
134 cryptlen -= digestsize;
135
136 token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
137 token[0].packet_length = assoclen;
138 token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH |
139 EIP197_TOKEN_INS_TYPE_OUTPUT;
140
141 token[1].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
142 token[1].packet_length = cryptlen;
143 token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
144 token[1].instructions = EIP197_TOKEN_INS_LAST |
145 EIP197_TOKEN_INS_TYPE_CRYTO |
146 EIP197_TOKEN_INS_TYPE_HASH |
147 EIP197_TOKEN_INS_TYPE_OUTPUT;
148
149 if (direction == SAFEXCEL_ENCRYPT) {
150 token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
151 token[2].packet_length = digestsize;
152 token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
153 EIP197_TOKEN_STAT_LAST_PACKET;
154 token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
155 EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
156 } else {
157 token[2].opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
158 token[2].packet_length = digestsize;
159 token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
160 EIP197_TOKEN_STAT_LAST_PACKET;
161 token[2].instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
162
163 token[3].opcode = EIP197_TOKEN_OPCODE_VERIFY;
164 token[3].packet_length = digestsize |
165 EIP197_TOKEN_HASH_RESULT_VERIFY;
166 token[3].stat = EIP197_TOKEN_STAT_LAST_HASH |
167 EIP197_TOKEN_STAT_LAST_PACKET;
168 token[3].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
169 }
170}
171
Antoine Tenart8ac12832018-05-14 15:10:56 +0200172static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
173 const u8 *key, unsigned int len)
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200174{
175 struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
176 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
Antoine Ténart871df312017-12-14 15:26:58 +0100177 struct safexcel_crypto_priv *priv = ctx->priv;
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200178 struct crypto_aes_ctx aes;
179 int ret, i;
180
181 ret = crypto_aes_expand_key(&aes, key, len);
182 if (ret) {
183 crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
184 return ret;
185 }
186
Antoine Tenart53c83e92018-06-28 17:15:35 +0200187 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
Ofer Heifetzc4daf4c2017-12-14 15:26:47 +0100188 for (i = 0; i < len / sizeof(u32); i++) {
189 if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
190 ctx->base.needs_inv = true;
191 break;
192 }
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200193 }
194 }
195
196 for (i = 0; i < len / sizeof(u32); i++)
197 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
198
199 ctx->key_len = len;
200
201 memzero_explicit(&aes, sizeof(aes));
202 return 0;
203}
204
Antoine Tenartf6beaea2018-05-14 15:11:02 +0200205static int safexcel_aead_aes_setkey(struct crypto_aead *ctfm, const u8 *key,
206 unsigned int len)
207{
208 struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
209 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
210 struct safexcel_ahash_export_state istate, ostate;
211 struct safexcel_crypto_priv *priv = ctx->priv;
212 struct crypto_authenc_keys keys;
213
214 if (crypto_authenc_extractkeys(&keys, key, len) != 0)
215 goto badkey;
216
217 if (keys.enckeylen > sizeof(ctx->key))
218 goto badkey;
219
220 /* Encryption key */
Antoine Tenart53c83e92018-06-28 17:15:35 +0200221 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
Antoine Tenartf6beaea2018-05-14 15:11:02 +0200222 memcmp(ctx->key, keys.enckey, keys.enckeylen))
223 ctx->base.needs_inv = true;
224
225 /* Auth key */
Ofer Heifetza7dea8c2018-06-28 17:21:55 +0200226 switch (ctx->hash_alg) {
Antoine Tenart01ba0612018-05-14 15:11:04 +0200227 case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
228 if (safexcel_hmac_setkey("safexcel-sha1", keys.authkey,
229 keys.authkeylen, &istate, &ostate))
230 goto badkey;
231 break;
Antoine Tenart678b2872018-05-14 15:11:03 +0200232 case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
233 if (safexcel_hmac_setkey("safexcel-sha224", keys.authkey,
234 keys.authkeylen, &istate, &ostate))
235 goto badkey;
236 break;
237 case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
238 if (safexcel_hmac_setkey("safexcel-sha256", keys.authkey,
239 keys.authkeylen, &istate, &ostate))
240 goto badkey;
241 break;
Antoine Tenartea23cb52018-05-29 14:13:52 +0200242 case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
243 if (safexcel_hmac_setkey("safexcel-sha384", keys.authkey,
244 keys.authkeylen, &istate, &ostate))
245 goto badkey;
246 break;
Antoine Tenart87eee122018-05-29 14:13:48 +0200247 case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
248 if (safexcel_hmac_setkey("safexcel-sha512", keys.authkey,
249 keys.authkeylen, &istate, &ostate))
250 goto badkey;
251 break;
Antoine Tenart678b2872018-05-14 15:11:03 +0200252 default:
253 dev_err(priv->dev, "aead: unsupported hash algorithm\n");
Antoine Tenartf6beaea2018-05-14 15:11:02 +0200254 goto badkey;
Antoine Tenart678b2872018-05-14 15:11:03 +0200255 }
Antoine Tenartf6beaea2018-05-14 15:11:02 +0200256
257 crypto_aead_set_flags(ctfm, crypto_aead_get_flags(ctfm) &
258 CRYPTO_TFM_RES_MASK);
259
Antoine Tenart53c83e92018-06-28 17:15:35 +0200260 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
Antoine Tenartf6beaea2018-05-14 15:11:02 +0200261 (memcmp(ctx->ipad, istate.state, ctx->state_sz) ||
262 memcmp(ctx->opad, ostate.state, ctx->state_sz)))
263 ctx->base.needs_inv = true;
264
265 /* Now copy the keys into the context */
266 memcpy(ctx->key, keys.enckey, keys.enckeylen);
267 ctx->key_len = keys.enckeylen;
268
269 memcpy(ctx->ipad, &istate.state, ctx->state_sz);
270 memcpy(ctx->opad, &ostate.state, ctx->state_sz);
271
272 memzero_explicit(&keys, sizeof(keys));
273 return 0;
274
275badkey:
276 crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
277 memzero_explicit(&keys, sizeof(keys));
278 return -EINVAL;
279}
280
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200281static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
Ofer Heifetz847ccfc2017-12-14 15:26:49 +0100282 struct crypto_async_request *async,
Antoine Tenart8ac12832018-05-14 15:10:56 +0200283 struct safexcel_cipher_req *sreq,
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200284 struct safexcel_command_desc *cdesc)
285{
286 struct safexcel_crypto_priv *priv = ctx->priv;
287 int ctrl_size;
288
Antoine Tenartf6beaea2018-05-14 15:11:02 +0200289 if (ctx->aead) {
290 if (sreq->direction == SAFEXCEL_ENCRYPT)
291 cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
292 else
293 cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
294 } else {
295 cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_CRYPTO_OUT;
Antoine Tenart3a5ca2302018-05-14 15:10:57 +0200296
Antoine Tenartf6beaea2018-05-14 15:11:02 +0200297 /* The decryption control type is a combination of the
298 * encryption type and CONTEXT_CONTROL_TYPE_NULL_IN, for all
299 * types.
300 */
301 if (sreq->direction == SAFEXCEL_DECRYPT)
302 cdesc->control_data.control0 |= CONTEXT_CONTROL_TYPE_NULL_IN;
303 }
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200304
305 cdesc->control_data.control0 |= CONTEXT_CONTROL_KEY_EN;
306 cdesc->control_data.control1 |= ctx->mode;
307
Antoine Tenartf6beaea2018-05-14 15:11:02 +0200308 if (ctx->aead)
309 cdesc->control_data.control0 |= CONTEXT_CONTROL_DIGEST_HMAC |
Ofer Heifetza7dea8c2018-06-28 17:21:55 +0200310 ctx->hash_alg;
Antoine Tenartf6beaea2018-05-14 15:11:02 +0200311
Ofer Heifetza7dea8c2018-06-28 17:21:55 +0200312 if (ctx->alg == SAFEXCEL_DES) {
313 cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_DES;
Ofer Heifetz624698792018-06-28 17:21:56 +0200314 } else if (ctx->alg == SAFEXCEL_3DES) {
315 cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_3DES;
Ofer Heifetza7dea8c2018-06-28 17:21:55 +0200316 } else if (ctx->alg == SAFEXCEL_AES) {
317 switch (ctx->key_len) {
318 case AES_KEYSIZE_128:
319 cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES128;
320 break;
321 case AES_KEYSIZE_192:
322 cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES192;
323 break;
324 case AES_KEYSIZE_256:
325 cdesc->control_data.control0 |= CONTEXT_CONTROL_CRYPTO_ALG_AES256;
326 break;
327 default:
328 dev_err(priv->dev, "aes keysize not supported: %u\n",
329 ctx->key_len);
330 return -EINVAL;
331 }
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200332 }
Antoine Tenartfef0cfe2018-05-14 15:10:58 +0200333
334 ctrl_size = ctx->key_len / sizeof(u32);
Antoine Tenartf6beaea2018-05-14 15:11:02 +0200335 if (ctx->aead)
336 /* Take in account the ipad+opad digests */
337 ctrl_size += ctx->state_sz / sizeof(u32) * 2;
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200338 cdesc->control_data.control0 |= CONTEXT_CONTROL_SIZE(ctrl_size);
339
340 return 0;
341}
342
Ofer Heifetz1eb7b402017-12-11 12:10:55 +0100343static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
344 struct crypto_async_request *async,
Antoine Tenart8ac12832018-05-14 15:10:56 +0200345 struct scatterlist *src,
346 struct scatterlist *dst,
347 unsigned int cryptlen,
348 struct safexcel_cipher_req *sreq,
Ofer Heifetz1eb7b402017-12-11 12:10:55 +0100349 bool *should_complete, int *ret)
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200350{
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200351 struct safexcel_result_desc *rdesc;
352 int ndesc = 0;
353
354 *ret = 0;
355
Antoine Tenart89332592019-05-27 16:51:06 +0200356 if (unlikely(!sreq->rdescs))
357 return 0;
358
359 while (sreq->rdescs--) {
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200360 rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
361 if (IS_ERR(rdesc)) {
362 dev_err(priv->dev,
363 "cipher: result: could not retrieve the result descriptor\n");
364 *ret = PTR_ERR(rdesc);
365 break;
366 }
367
Antoine Tenartbdfd1902018-05-14 15:11:01 +0200368 if (likely(!*ret))
369 *ret = safexcel_rdesc_check_errors(priv, rdesc);
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200370
371 ndesc++;
Antoine Tenart89332592019-05-27 16:51:06 +0200372 }
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200373
374 safexcel_complete(priv, ring);
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200375
Antoine Tenart8ac12832018-05-14 15:10:56 +0200376 if (src == dst) {
Antoine Tenart583d7e12019-05-27 16:51:05 +0200377 dma_unmap_sg(priv->dev, src, sg_nents(src), DMA_BIDIRECTIONAL);
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200378 } else {
Antoine Tenart583d7e12019-05-27 16:51:05 +0200379 dma_unmap_sg(priv->dev, src, sg_nents(src), DMA_TO_DEVICE);
380 dma_unmap_sg(priv->dev, dst, sg_nents(dst), DMA_FROM_DEVICE);
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200381 }
382
383 *should_complete = true;
384
385 return ndesc;
386}
387
Ofer Heifetza7dea8c2018-06-28 17:21:55 +0200388static int safexcel_send_req(struct crypto_async_request *base, int ring,
Antoine Tenart8ac12832018-05-14 15:10:56 +0200389 struct safexcel_cipher_req *sreq,
390 struct scatterlist *src, struct scatterlist *dst,
Antoine Tenartf6beaea2018-05-14 15:11:02 +0200391 unsigned int cryptlen, unsigned int assoclen,
392 unsigned int digestsize, u8 *iv, int *commands,
Antoine Tenart8ac12832018-05-14 15:10:56 +0200393 int *results)
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200394{
Antoine Tenart8ac12832018-05-14 15:10:56 +0200395 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200396 struct safexcel_crypto_priv *priv = ctx->priv;
397 struct safexcel_command_desc *cdesc;
Antoine Tenarte5c8ee12018-07-13 17:43:16 +0200398 struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200399 struct scatterlist *sg;
Antoine Tenartf6beaea2018-05-14 15:11:02 +0200400 unsigned int totlen = cryptlen + assoclen;
401 int nr_src, nr_dst, n_cdesc = 0, n_rdesc = 0, queued = totlen;
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200402 int i, ret = 0;
403
Antoine Tenart8ac12832018-05-14 15:10:56 +0200404 if (src == dst) {
Antoine Tenart583d7e12019-05-27 16:51:05 +0200405 nr_src = dma_map_sg(priv->dev, src, sg_nents(src),
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200406 DMA_BIDIRECTIONAL);
407 nr_dst = nr_src;
408 if (!nr_src)
409 return -EINVAL;
410 } else {
Antoine Tenart583d7e12019-05-27 16:51:05 +0200411 nr_src = dma_map_sg(priv->dev, src, sg_nents(src),
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200412 DMA_TO_DEVICE);
413 if (!nr_src)
414 return -EINVAL;
415
Antoine Tenart583d7e12019-05-27 16:51:05 +0200416 nr_dst = dma_map_sg(priv->dev, dst, sg_nents(dst),
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200417 DMA_FROM_DEVICE);
418 if (!nr_dst) {
Antoine Tenart583d7e12019-05-27 16:51:05 +0200419 dma_unmap_sg(priv->dev, src, nr_src, DMA_TO_DEVICE);
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200420 return -EINVAL;
421 }
422 }
423
424 memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);
425
Antoine Tenartf6beaea2018-05-14 15:11:02 +0200426 if (ctx->aead) {
427 memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
428 ctx->ipad, ctx->state_sz);
429 memcpy(ctx->base.ctxr->data + (ctx->key_len + ctx->state_sz) / sizeof(u32),
430 ctx->opad, ctx->state_sz);
431 }
432
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200433 /* command descriptors */
Antoine Tenart8ac12832018-05-14 15:10:56 +0200434 for_each_sg(src, sg, nr_src, i) {
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200435 int len = sg_dma_len(sg);
436
437 /* Do not overflow the request */
438 if (queued - len < 0)
439 len = queued;
440
441 cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc, !(queued - len),
Antoine Tenartf6beaea2018-05-14 15:11:02 +0200442 sg_dma_address(sg), len, totlen,
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200443 ctx->base.ctxr_dma);
444 if (IS_ERR(cdesc)) {
445 /* No space left in the command descriptor ring */
446 ret = PTR_ERR(cdesc);
447 goto cdesc_rollback;
448 }
449 n_cdesc++;
450
451 if (n_cdesc == 1) {
Antoine Tenart8ac12832018-05-14 15:10:56 +0200452 safexcel_context_control(ctx, base, sreq, cdesc);
Antoine Tenartf6beaea2018-05-14 15:11:02 +0200453 if (ctx->aead)
454 safexcel_aead_token(ctx, iv, cdesc,
455 sreq->direction, cryptlen,
456 assoclen, digestsize);
457 else
458 safexcel_skcipher_token(ctx, iv, cdesc,
459 cryptlen);
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200460 }
461
462 queued -= len;
463 if (!queued)
464 break;
465 }
466
467 /* result descriptors */
Antoine Tenart8ac12832018-05-14 15:10:56 +0200468 for_each_sg(dst, sg, nr_dst, i) {
Antoine Tenart583d7e12019-05-27 16:51:05 +0200469 bool first = !i, last = sg_is_last(sg);
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200470 u32 len = sg_dma_len(sg);
471
472 rdesc = safexcel_add_rdesc(priv, ring, first, last,
473 sg_dma_address(sg), len);
474 if (IS_ERR(rdesc)) {
475 /* No space left in the result descriptor ring */
476 ret = PTR_ERR(rdesc);
477 goto rdesc_rollback;
478 }
Ofer Heifetz9744fec2018-06-28 17:21:57 +0200479 if (first)
480 first_rdesc = rdesc;
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200481 n_rdesc++;
482 }
483
Ofer Heifetz9744fec2018-06-28 17:21:57 +0200484 safexcel_rdr_req_set(priv, ring, first_rdesc, base);
Antoine Ténart97858432017-06-15 09:56:23 +0200485
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200486 *commands = n_cdesc;
Ofer Heifetz152bdf4c2017-06-15 09:56:22 +0200487 *results = n_rdesc;
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200488 return 0;
489
490rdesc_rollback:
491 for (i = 0; i < n_rdesc; i++)
492 safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
493cdesc_rollback:
494 for (i = 0; i < n_cdesc; i++)
495 safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
496
Antoine Tenart8ac12832018-05-14 15:10:56 +0200497 if (src == dst) {
Antoine Tenart583d7e12019-05-27 16:51:05 +0200498 dma_unmap_sg(priv->dev, src, nr_src, DMA_BIDIRECTIONAL);
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200499 } else {
Antoine Tenart583d7e12019-05-27 16:51:05 +0200500 dma_unmap_sg(priv->dev, src, nr_src, DMA_TO_DEVICE);
501 dma_unmap_sg(priv->dev, dst, nr_dst, DMA_FROM_DEVICE);
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200502 }
503
504 return ret;
505}
506
507static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
508 int ring,
Antoine Tenart8ac12832018-05-14 15:10:56 +0200509 struct crypto_async_request *base,
Antoine Tenart89332592019-05-27 16:51:06 +0200510 struct safexcel_cipher_req *sreq,
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200511 bool *should_complete, int *ret)
512{
Antoine Tenart8ac12832018-05-14 15:10:56 +0200513 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200514 struct safexcel_result_desc *rdesc;
515 int ndesc = 0, enq_ret;
516
517 *ret = 0;
518
Antoine Tenart89332592019-05-27 16:51:06 +0200519 if (unlikely(!sreq->rdescs))
520 return 0;
521
522 while (sreq->rdescs--) {
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200523 rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
524 if (IS_ERR(rdesc)) {
525 dev_err(priv->dev,
526 "cipher: invalidate: could not retrieve the result descriptor\n");
527 *ret = PTR_ERR(rdesc);
528 break;
529 }
530
Antoine Tenartcda3e732018-05-29 14:13:43 +0200531 if (likely(!*ret))
532 *ret = safexcel_rdesc_check_errors(priv, rdesc);
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200533
534 ndesc++;
Antoine Tenart89332592019-05-27 16:51:06 +0200535 }
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200536
537 safexcel_complete(priv, ring);
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200538
539 if (ctx->base.exit_inv) {
540 dma_pool_free(priv->context_pool, ctx->base.ctxr,
541 ctx->base.ctxr_dma);
542
543 *should_complete = true;
544
545 return ndesc;
546 }
547
Antoine Ténart86671ab2017-06-15 09:56:24 +0200548 ring = safexcel_select_ring(priv);
549 ctx->base.ring = ring;
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200550
Antoine Ténart86671ab2017-06-15 09:56:24 +0200551 spin_lock_bh(&priv->ring[ring].queue_lock);
Antoine Tenart8ac12832018-05-14 15:10:56 +0200552 enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
Antoine Ténart86671ab2017-06-15 09:56:24 +0200553 spin_unlock_bh(&priv->ring[ring].queue_lock);
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200554
555 if (enq_ret != -EINPROGRESS)
556 *ret = enq_ret;
557
Antoine Ténart8472e772017-12-14 15:26:51 +0100558 queue_work(priv->ring[ring].workqueue,
559 &priv->ring[ring].work_data.work);
Antoine Ténart86671ab2017-06-15 09:56:24 +0200560
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200561 *should_complete = false;
562
563 return ndesc;
564}
565
Antoine Tenart8ac12832018-05-14 15:10:56 +0200566static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
567 int ring,
568 struct crypto_async_request *async,
569 bool *should_complete, int *ret)
Ofer Heifetz1eb7b402017-12-11 12:10:55 +0100570{
571 struct skcipher_request *req = skcipher_request_cast(async);
572 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
Antoine Tenart57660b12019-05-27 16:51:03 +0200573 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(async->tfm);
Ofer Heifetz1eb7b402017-12-11 12:10:55 +0100574 int err;
575
576 if (sreq->needs_inv) {
577 sreq->needs_inv = false;
Antoine Tenart89332592019-05-27 16:51:06 +0200578 err = safexcel_handle_inv_result(priv, ring, async, sreq,
Ofer Heifetz1eb7b402017-12-11 12:10:55 +0100579 should_complete, ret);
580 } else {
Antoine Tenart8ac12832018-05-14 15:10:56 +0200581 err = safexcel_handle_req_result(priv, ring, async, req->src,
582 req->dst, req->cryptlen, sreq,
Ofer Heifetz1eb7b402017-12-11 12:10:55 +0100583 should_complete, ret);
Antoine Tenart57660b12019-05-27 16:51:03 +0200584
585 if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) {
586 u32 block_sz = 0;
587
588 switch (ctx->alg) {
589 case SAFEXCEL_DES:
590 block_sz = DES_BLOCK_SIZE;
591 break;
592 case SAFEXCEL_3DES:
593 block_sz = DES3_EDE_BLOCK_SIZE;
594 break;
595 case SAFEXCEL_AES:
596 block_sz = AES_BLOCK_SIZE;
597 break;
598 }
599
600 memcpy(req->iv, ctx->base.ctxr->data, block_sz);
601 }
Ofer Heifetz1eb7b402017-12-11 12:10:55 +0100602 }
603
604 return err;
605}
606
Antoine Tenartf6beaea2018-05-14 15:11:02 +0200607static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
608 int ring,
609 struct crypto_async_request *async,
610 bool *should_complete, int *ret)
611{
612 struct aead_request *req = aead_request_cast(async);
613 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
614 struct safexcel_cipher_req *sreq = aead_request_ctx(req);
615 int err;
616
617 if (sreq->needs_inv) {
618 sreq->needs_inv = false;
Antoine Tenart89332592019-05-27 16:51:06 +0200619 err = safexcel_handle_inv_result(priv, ring, async, sreq,
Antoine Tenartf6beaea2018-05-14 15:11:02 +0200620 should_complete, ret);
621 } else {
622 err = safexcel_handle_req_result(priv, ring, async, req->src,
623 req->dst,
624 req->cryptlen + crypto_aead_authsize(tfm),
625 sreq, should_complete, ret);
626 }
627
628 return err;
629}
630
Antoine Tenart8ac12832018-05-14 15:10:56 +0200631static int safexcel_cipher_send_inv(struct crypto_async_request *base,
Ofer Heifetz9744fec2018-06-28 17:21:57 +0200632 int ring, int *commands, int *results)
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200633{
Antoine Tenart8ac12832018-05-14 15:10:56 +0200634 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200635 struct safexcel_crypto_priv *priv = ctx->priv;
636 int ret;
637
Ofer Heifetz9744fec2018-06-28 17:21:57 +0200638 ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200639 if (unlikely(ret))
640 return ret;
641
642 *commands = 1;
643 *results = 1;
644
645 return 0;
646}
647
Antoine Tenart8ac12832018-05-14 15:10:56 +0200648static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
Antoine Tenart8ac12832018-05-14 15:10:56 +0200649 int *commands, int *results)
Ofer Heifetz1eb7b402017-12-11 12:10:55 +0100650{
651 struct skcipher_request *req = skcipher_request_cast(async);
Antoine Ténart871df312017-12-14 15:26:58 +0100652 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
Ofer Heifetz1eb7b402017-12-11 12:10:55 +0100653 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
Antoine Ténart871df312017-12-14 15:26:58 +0100654 struct safexcel_crypto_priv *priv = ctx->priv;
Ofer Heifetz1eb7b402017-12-11 12:10:55 +0100655 int ret;
656
Antoine Tenart53c83e92018-06-28 17:15:35 +0200657 BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
Antoine Ténart871df312017-12-14 15:26:58 +0100658
Ofer Heifetz1eb7b402017-12-11 12:10:55 +0100659 if (sreq->needs_inv)
Ofer Heifetz9744fec2018-06-28 17:21:57 +0200660 ret = safexcel_cipher_send_inv(async, ring, commands, results);
Ofer Heifetz1eb7b402017-12-11 12:10:55 +0100661 else
Ofer Heifetz9744fec2018-06-28 17:21:57 +0200662 ret = safexcel_send_req(async, ring, sreq, req->src,
Antoine Tenartf6beaea2018-05-14 15:11:02 +0200663 req->dst, req->cryptlen, 0, 0, req->iv,
664 commands, results);
Antoine Tenart89332592019-05-27 16:51:06 +0200665
666 sreq->rdescs = *results;
Antoine Tenartf6beaea2018-05-14 15:11:02 +0200667 return ret;
668}
669
670static int safexcel_aead_send(struct crypto_async_request *async, int ring,
Ofer Heifetz9744fec2018-06-28 17:21:57 +0200671 int *commands, int *results)
Antoine Tenartf6beaea2018-05-14 15:11:02 +0200672{
673 struct aead_request *req = aead_request_cast(async);
674 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
675 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
676 struct safexcel_cipher_req *sreq = aead_request_ctx(req);
677 struct safexcel_crypto_priv *priv = ctx->priv;
678 int ret;
679
Antoine Tenart53c83e92018-06-28 17:15:35 +0200680 BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
Antoine Tenartf6beaea2018-05-14 15:11:02 +0200681
682 if (sreq->needs_inv)
Ofer Heifetz9744fec2018-06-28 17:21:57 +0200683 ret = safexcel_cipher_send_inv(async, ring, commands, results);
Antoine Tenartf6beaea2018-05-14 15:11:02 +0200684 else
Ofer Heifetz9744fec2018-06-28 17:21:57 +0200685 ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
686 req->cryptlen, req->assoclen,
Antoine Tenartf6beaea2018-05-14 15:11:02 +0200687 crypto_aead_authsize(tfm), req->iv,
Ofer Heifetz1eb7b402017-12-11 12:10:55 +0100688 commands, results);
Antoine Tenart89332592019-05-27 16:51:06 +0200689 sreq->rdescs = *results;
Ofer Heifetz1eb7b402017-12-11 12:10:55 +0100690 return ret;
691}
692
/*
 * Synchronously invalidate the engine-cached context record of @tfm.
 *
 * The caller prepares an invalidation-only request (@base / @sreq) with a
 * completion callback wired to @result; this helper marks it as an
 * invalidation, queues it on the ring the context was assigned to, kicks
 * that ring's workqueue and blocks until the completion fires.
 *
 * Returns 0 on success or the completion error reported by the engine.
 */
static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
				    struct crypto_async_request *base,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_inv_result *result)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ring = ctx->base.ring;

	init_completion(&result->completion);

	/* Re-derive the context from the request's tfm before flagging it. */
	ctx = crypto_tfm_ctx(base->tfm);
	ctx->base.exit_inv = true;
	sreq->needs_inv = true;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	/* Block until safexcel_inv_complete() signals the result. */
	wait_for_completion(&result->completion);

	if (result->error) {
		dev_warn(priv->dev,
			"cipher: sync: invalidate: completion error %d\n",
			 result->error);
		return result->error;
	}

	return 0;
}
726
/*
 * Build an on-stack skcipher invalidation request for @tfm and run the
 * synchronous context-record invalidation through
 * safexcel_cipher_exit_inv(). Called from the skcipher cra_exit path.
 */
static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	struct safexcel_inv_result result = {};

	/* Zero only the request head; the driver context follows it. */
	memset(req, 0, sizeof(struct skcipher_request));

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      safexcel_inv_complete, &result);
	skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));

	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}
741
/*
 * AEAD counterpart of safexcel_skcipher_exit_inv(): build an on-stack
 * aead request for @tfm and synchronously invalidate its cached context
 * record in the engine.
 */
static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	struct safexcel_inv_result result = {};

	/* Zero only the request head; the driver context follows it. */
	memset(req, 0, sizeof(struct aead_request));

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  safexcel_inv_complete, &result);
	aead_request_set_tfm(req, __crypto_aead_cast(tfm));

	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}
756
/*
 * Common enqueue path for both skcipher and AEAD requests.
 *
 * Records the requested direction/mode/algorithm in the tfm context, then:
 *  - if a context record already exists and the engine has a record cache
 *    (EIP197_TRC_CACHE) with a pending key change, flags this request to
 *    carry the invalidation;
 *  - otherwise picks a ring and allocates a fresh DMA context record.
 * Finally queues the request on the chosen ring and kicks its workqueue.
 *
 * Returns the crypto_enqueue_request() status, or -ENOMEM if the context
 * record could not be allocated.
 */
static int safexcel_queue_req(struct crypto_async_request *base,
			      struct safexcel_cipher_req *sreq,
			      enum safexcel_cipher_direction dir, u32 mode,
			      enum safexcel_cipher_alg alg)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret, ring;

	sreq->needs_inv = false;
	sreq->direction = dir;
	ctx->alg = alg;
	ctx->mode = mode;

	if (ctx->base.ctxr) {
		/* Key changed since the record was cached: invalidate it. */
		if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
			sreq->needs_inv = true;
			ctx->base.needs_inv = false;
		}
	} else {
		ctx->base.ring = safexcel_select_ring(priv);
		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
						 EIP197_GFP_FLAGS(*base),
						 &ctx->base.ctxr_dma);
		if (!ctx->base.ctxr)
			return -ENOMEM;
	}

	ring = ctx->base.ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	return ret;
}
796
797static int safexcel_ecb_aes_encrypt(struct skcipher_request *req)
798{
Ofer Heifetza7dea8c2018-06-28 17:21:55 +0200799 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
800 SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
801 SAFEXCEL_AES);
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200802}
803
804static int safexcel_ecb_aes_decrypt(struct skcipher_request *req)
805{
Ofer Heifetza7dea8c2018-06-28 17:21:55 +0200806 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
807 SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
808 SAFEXCEL_AES);
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200809}
810
/*
 * skcipher cra_init: size the per-request driver context, bind the tfm
 * context to the device instance this template was registered for, and
 * install the skcipher send/result handlers.
 */
static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(tfm->__crt_alg, struct safexcel_alg_template,
			     alg.skcipher.base);

	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
				    sizeof(struct safexcel_cipher_req));

	ctx->priv = tmpl->priv;

	ctx->base.send = safexcel_skcipher_send;
	ctx->base.handle_result = safexcel_skcipher_handle_result;
	return 0;
}
827
/*
 * Common cra_exit step: scrub key material from the tfm context and, if a
 * context record was allocated, scrub its data too.
 *
 * Returns -ENOMEM as a sentinel when no context record exists — callers
 * (safexcel_skcipher_cra_exit / safexcel_aead_cra_exit) use a non-zero
 * return to skip the engine invalidation step; it is not an error.
 */
static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* memzero_explicit() so the key wipe cannot be optimized away. */
	memzero_explicit(ctx->key, sizeof(ctx->key));

	/* context not allocated, skip invalidation */
	if (!ctx->base.ctxr)
		return -ENOMEM;

	memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
	return 0;
}
841
/*
 * skcipher cra_exit: scrub the context, then either invalidate the cached
 * context record in the engine (record-cache capable EIP197) or simply
 * free the DMA record (EIP97, no record cache).
 */
static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	/* Non-zero means no context record was ever allocated. */
	if (safexcel_cipher_cra_exit(tfm))
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_skcipher_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "skcipher: invalidation error %d\n",
				 ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}
861
/*
 * AEAD cra_exit: same shape as safexcel_skcipher_cra_exit() — scrub the
 * context, then invalidate the engine's cached record (EIP197 with record
 * cache) or free the DMA record directly (no record cache).
 */
static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	int ret;

	/* Non-zero means no context record was ever allocated. */
	if (safexcel_cipher_cra_exit(tfm))
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_aead_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "aead: invalidation error %d\n",
				 ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}
881
/* ecb(aes) skcipher template, offloaded to EIP97/EIP197 engines. */
struct safexcel_alg_template safexcel_alg_ecb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_ecb_aes_encrypt,
		.decrypt = safexcel_ecb_aes_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "safexcel-ecb-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
906
907static int safexcel_cbc_aes_encrypt(struct skcipher_request *req)
908{
Ofer Heifetza7dea8c2018-06-28 17:21:55 +0200909 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
910 SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
911 SAFEXCEL_AES);
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200912}
913
914static int safexcel_cbc_aes_decrypt(struct skcipher_request *req)
915{
Ofer Heifetza7dea8c2018-06-28 17:21:55 +0200916 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
917 SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
918 SAFEXCEL_AES);
Antoine Ténart1b44c5a2017-05-24 16:10:34 +0200919}
920
/* cbc(aes) skcipher template, offloaded to EIP97/EIP197 engines. */
struct safexcel_alg_template safexcel_alg_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_cbc_aes_encrypt,
		.decrypt = safexcel_cbc_aes_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "safexcel-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
Antoine Tenartf6beaea2018-05-14 15:11:02 +0200946
Ofer Heifetza7dea8c2018-06-28 17:21:55 +0200947static int safexcel_cbc_des_encrypt(struct skcipher_request *req)
948{
949 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
950 SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
951 SAFEXCEL_DES);
952}
953
954static int safexcel_cbc_des_decrypt(struct skcipher_request *req)
955{
956 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
957 SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
958 SAFEXCEL_DES);
959}
960
/*
 * Set the DES key for a skcipher tfm.
 *
 * Rejects keys of the wrong length (CRYPTO_TFM_RES_BAD_KEY_LEN) and, when
 * the user requested CRYPTO_TFM_REQ_FORBID_WEAK_KEYS, rejects weak keys
 * (des_ekey() returns 0 for a weak key). If the key changes while a
 * context record is live, flags the record for invalidation so the engine
 * does not keep using the stale key.
 */
static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
			       unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 tmp[DES_EXPKEY_WORDS];
	int ret;

	if (len != DES_KEY_SIZE) {
		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	ret = des_ekey(tmp, key);
	if (!ret && (tfm->crt_flags & CRYPTO_TFM_REQ_FORBID_WEAK_KEYS)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_WEAK_KEY;
		return -EINVAL;
	}

	/* if context exists and key changed, need to invalidate it */
	if (ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, len);
	ctx->key_len = len;

	return 0;
}
990
/* cbc(des) skcipher template, offloaded to EIP97/EIP197 engines. */
struct safexcel_alg_template safexcel_alg_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_cbc_des_encrypt,
		.decrypt = safexcel_cbc_des_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des)",
			.cra_driver_name = "safexcel-cbc-des",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1016
1017static int safexcel_ecb_des_encrypt(struct skcipher_request *req)
1018{
1019 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1020 SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
1021 SAFEXCEL_DES);
1022}
1023
1024static int safexcel_ecb_des_decrypt(struct skcipher_request *req)
1025{
1026 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1027 SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
1028 SAFEXCEL_DES);
1029}
1030
1031struct safexcel_alg_template safexcel_alg_ecb_des = {
1032 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1033 .engines = EIP97IES | EIP197B | EIP197D,
1034 .alg.skcipher = {
1035 .setkey = safexcel_des_setkey,
1036 .encrypt = safexcel_ecb_des_encrypt,
1037 .decrypt = safexcel_ecb_des_decrypt,
1038 .min_keysize = DES_KEY_SIZE,
1039 .max_keysize = DES_KEY_SIZE,
1040 .ivsize = DES_BLOCK_SIZE,
1041 .base = {
1042 .cra_name = "ecb(des)",
1043 .cra_driver_name = "safexcel-ecb-des",
1044 .cra_priority = 300,
Eric Biggers2b78aeb2018-11-14 11:10:53 -08001045 .cra_flags = CRYPTO_ALG_ASYNC |
Ofer Heifetza7dea8c2018-06-28 17:21:55 +02001046 CRYPTO_ALG_KERN_DRIVER_ONLY,
1047 .cra_blocksize = DES_BLOCK_SIZE,
1048 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1049 .cra_alignmask = 0,
1050 .cra_init = safexcel_skcipher_cra_init,
1051 .cra_exit = safexcel_skcipher_cra_exit,
1052 .cra_module = THIS_MODULE,
1053 },
1054 },
1055};
Ofer Heifetz624698792018-06-28 17:21:56 +02001056
1057static int safexcel_cbc_des3_ede_encrypt(struct skcipher_request *req)
1058{
1059 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1060 SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
1061 SAFEXCEL_3DES);
1062}
1063
1064static int safexcel_cbc_des3_ede_decrypt(struct skcipher_request *req)
1065{
1066 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1067 SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_CBC,
1068 SAFEXCEL_3DES);
1069}
1070
/*
 * Set the triple-DES key for a skcipher tfm.
 *
 * des3_verify_key() performs the standard 3DES key checks (length and
 * weak-key policy); on success the key is stored and, if it differs from
 * the one backing a live context record, the record is flagged for
 * invalidation.
 */
static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
				   const u8 *key, unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
	int err;

	err = des3_verify_key(ctfm, key);
	if (unlikely(err))
		return err;

	/* if context exists and key changed, need to invalidate it */
	if (ctx->base.ctxr_dma) {
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;
	}

	memcpy(ctx->key, key, len);

	ctx->key_len = len;

	return 0;
}
1093
/* cbc(des3_ede) skcipher template, offloaded to EIP97/EIP197 engines. */
struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_cbc_des3_ede_encrypt,
		.decrypt = safexcel_cbc_des3_ede_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des3_ede)",
			.cra_driver_name = "safexcel-cbc-des3_ede",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1119
1120static int safexcel_ecb_des3_ede_encrypt(struct skcipher_request *req)
1121{
1122 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1123 SAFEXCEL_ENCRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
1124 SAFEXCEL_3DES);
1125}
1126
1127static int safexcel_ecb_des3_ede_decrypt(struct skcipher_request *req)
1128{
1129 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1130 SAFEXCEL_DECRYPT, CONTEXT_CONTROL_CRYPTO_MODE_ECB,
1131 SAFEXCEL_3DES);
1132}
1133
1134struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
1135 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1136 .engines = EIP97IES | EIP197B | EIP197D,
1137 .alg.skcipher = {
1138 .setkey = safexcel_des3_ede_setkey,
1139 .encrypt = safexcel_ecb_des3_ede_encrypt,
1140 .decrypt = safexcel_ecb_des3_ede_decrypt,
1141 .min_keysize = DES3_EDE_KEY_SIZE,
1142 .max_keysize = DES3_EDE_KEY_SIZE,
1143 .ivsize = DES3_EDE_BLOCK_SIZE,
1144 .base = {
1145 .cra_name = "ecb(des3_ede)",
1146 .cra_driver_name = "safexcel-ecb-des3_ede",
1147 .cra_priority = 300,
Eric Biggers2b78aeb2018-11-14 11:10:53 -08001148 .cra_flags = CRYPTO_ALG_ASYNC |
Ofer Heifetz624698792018-06-28 17:21:56 +02001149 CRYPTO_ALG_KERN_DRIVER_ONLY,
1150 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1151 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1152 .cra_alignmask = 0,
1153 .cra_init = safexcel_skcipher_cra_init,
1154 .cra_exit = safexcel_skcipher_cra_exit,
1155 .cra_module = THIS_MODULE,
1156 },
1157 },
1158};
1159
Antoine Tenartf6beaea2018-05-14 15:11:02 +02001160static int safexcel_aead_encrypt(struct aead_request *req)
1161{
1162 struct safexcel_cipher_req *creq = aead_request_ctx(req);
1163
Ofer Heifetza7dea8c2018-06-28 17:21:55 +02001164 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT,
1165 CONTEXT_CONTROL_CRYPTO_MODE_CBC, SAFEXCEL_AES);
Antoine Tenartf6beaea2018-05-14 15:11:02 +02001166}
1167
1168static int safexcel_aead_decrypt(struct aead_request *req)
1169{
1170 struct safexcel_cipher_req *creq = aead_request_ctx(req);
1171
Ofer Heifetza7dea8c2018-06-28 17:21:55 +02001172 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT,
1173 CONTEXT_CONTROL_CRYPTO_MODE_CBC, SAFEXCEL_AES);
Antoine Tenartf6beaea2018-05-14 15:11:02 +02001174}
1175
/*
 * Common AEAD cra_init: size the per-request driver context, bind the tfm
 * to its device instance, mark the context as AEAD and install the AEAD
 * send/result handlers. The per-digest cra_init wrappers below call this
 * first and then set hash_alg/state_sz.
 */
static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(tfm->__crt_alg, struct safexcel_alg_template,
			     alg.aead.base);

	crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
				sizeof(struct safexcel_cipher_req));

	ctx->priv = tmpl->priv;

	ctx->aead = true;
	ctx->base.send = safexcel_aead_send;
	ctx->base.handle_result = safexcel_aead_handle_result;
	return 0;
}
1193
Antoine Tenart01ba0612018-05-14 15:11:04 +02001194static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
1195{
1196 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1197
1198 safexcel_aead_cra_init(tfm);
Ofer Heifetza7dea8c2018-06-28 17:21:55 +02001199 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
Antoine Tenart01ba0612018-05-14 15:11:04 +02001200 ctx->state_sz = SHA1_DIGEST_SIZE;
1201 return 0;
1202}
1203
/* authenc(hmac(sha1),cbc(aes)) AEAD template for EIP97/EIP197 engines. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
		.setkey = safexcel_aead_aes_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1228
Antoine Tenartf6beaea2018-05-14 15:11:02 +02001229static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
1230{
1231 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1232
1233 safexcel_aead_cra_init(tfm);
Ofer Heifetza7dea8c2018-06-28 17:21:55 +02001234 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
Antoine Tenartf6beaea2018-05-14 15:11:02 +02001235 ctx->state_sz = SHA256_DIGEST_SIZE;
1236 return 0;
1237}
1238
/* authenc(hmac(sha256),cbc(aes)) AEAD template for EIP97/EIP197 engines. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
		.setkey = safexcel_aead_aes_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
Antoine Tenart678b2872018-05-14 15:11:03 +02001263
1264static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
1265{
1266 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1267
1268 safexcel_aead_cra_init(tfm);
Ofer Heifetza7dea8c2018-06-28 17:21:55 +02001269 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
Antoine Tenart678b2872018-05-14 15:11:03 +02001270 ctx->state_sz = SHA256_DIGEST_SIZE;
1271 return 0;
1272}
1273
/* authenc(hmac(sha224),cbc(aes)) AEAD template for EIP97/EIP197 engines. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
		.setkey = safexcel_aead_aes_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
Antoine Tenart87eee122018-05-29 14:13:48 +02001298
1299static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
1300{
1301 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1302
1303 safexcel_aead_cra_init(tfm);
Ofer Heifetza7dea8c2018-06-28 17:21:55 +02001304 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
Antoine Tenart87eee122018-05-29 14:13:48 +02001305 ctx->state_sz = SHA512_DIGEST_SIZE;
1306 return 0;
1307}
1308
/* authenc(hmac(sha512),cbc(aes)) AEAD template for EIP97/EIP197 engines. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
		.setkey = safexcel_aead_aes_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
Antoine Tenartea23cb52018-05-29 14:13:52 +02001333
1334static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
1335{
1336 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1337
1338 safexcel_aead_cra_init(tfm);
Ofer Heifetza7dea8c2018-06-28 17:21:55 +02001339 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
Antoine Tenartea23cb52018-05-29 14:13:52 +02001340 ctx->state_sz = SHA512_DIGEST_SIZE;
1341 return 0;
1342}
1343
/* authenc(hmac(sha384),cbc(aes)) AEAD template for EIP97/EIP197 engines. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.engines = EIP97IES | EIP197B | EIP197D,
	.alg.aead = {
		.setkey = safexcel_aead_aes_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
			.cra_priority = 300,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};