/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * sha256_base.h - core logic for SHA-256 implementations
 *
 * Copyright (C) 2015 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#ifndef _CRYPTO_SHA256_BASE_H
#define _CRYPTO_SHA256_BASE_H

#include <crypto/internal/hash.h>
#include <crypto/sha2.h>
#include <linux/crypto.h>
#include <linux/module.h>
#include <linux/string.h>

#include <asm/unaligned.h>

typedef void (sha256_block_fn)(struct sha256_state *sst, u8 const *src,
			       int blocks);
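
/*
 * A minimal usage sketch (hypothetical, not part of this header): an
 * architecture-specific SHA-256 driver supplies a block function with
 * the signature above and wires the helpers below into its shash
 * callbacks. The my_* names are illustrative only.
 *
 *	static int my_sha256_update(struct shash_desc *desc, const u8 *data,
 *				    unsigned int len)
 *	{
 *		return sha256_base_do_update(desc, data, len,
 *					     my_sha256_blocks);
 *	}
 *
 *	static int my_sha256_final(struct shash_desc *desc, u8 *out)
 *	{
 *		sha256_base_do_finalize(desc, my_sha256_blocks);
 *		return sha256_base_finish(desc, out);
 *	}
 */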
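/*
 * Load the standard SHA-224/SHA-256 initial hash values into the
 * descriptor context via the sha2 library init helpers.
 */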
static inline int sha224_base_init(struct shash_desc *desc)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	sha224_init(sctx);
	return 0;
}

static inline int sha256_base_init(struct shash_desc *desc)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);

	sha256_init(sctx);
	return 0;
}

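/*
 * Buffer input until a full 64-byte block is available, then hand
 * complete blocks to block_fn() and stash any remainder in sctx->buf
 * for the next call.
 */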
static inline int sha256_base_do_update(struct shash_desc *desc,
					const u8 *data,
					unsigned int len,
					sha256_block_fn *block_fn)
{
	struct sha256_state *sctx = shash_desc_ctx(desc);
	unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;

	sctx->count += len;

	if (unlikely((partial + len) >= SHA256_BLOCK_SIZE)) {
		int blocks;

		if (partial) {
			int p = SHA256_BLOCK_SIZE - partial;

			memcpy(sctx->buf + partial, data, p);
			data += p;
			len -= p;

			block_fn(sctx, sctx->buf, 1);
		}

		blocks = len / SHA256_BLOCK_SIZE;
		len %= SHA256_BLOCK_SIZE;

		if (blocks) {
			block_fn(sctx, data, blocks);
			data += blocks * SHA256_BLOCK_SIZE;
		}
		partial = 0;
	}
	if (len)
		memcpy(sctx->buf + partial, data, len);

	return 0;
}

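/*
 * Apply the MD-style padding: append a 0x80 byte, zero-fill up to the
 * last 8 bytes of the final block (spilling into an extra block if
 * needed), and store the message length in bits as a big-endian 64-bit
 * value before processing the last block.
 */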
static inline int sha256_base_do_finalize(struct shash_desc *desc,
					  sha256_block_fn *block_fn)
{
	const int bit_offset = SHA256_BLOCK_SIZE - sizeof(__be64);
	struct sha256_state *sctx = shash_desc_ctx(desc);
	__be64 *bits = (__be64 *)(sctx->buf + bit_offset);
	unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;

	sctx->buf[partial++] = 0x80;
	if (partial > bit_offset) {
		memset(sctx->buf + partial, 0x0, SHA256_BLOCK_SIZE - partial);
		partial = 0;

		block_fn(sctx, sctx->buf, 1);
	}

	memset(sctx->buf + partial, 0x0, bit_offset - partial);
	*bits = cpu_to_be64(sctx->count << 3);
	block_fn(sctx, sctx->buf, 1);

	return 0;
}

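/*
 * Copy the digest words out big-endian, truncated to the transform's
 * digest size (28 bytes for SHA-224, 32 for SHA-256), then wipe the
 * descriptor state.
 */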
static inline int sha256_base_finish(struct shash_desc *desc, u8 *out)
{
	unsigned int digest_size = crypto_shash_digestsize(desc->tfm);
	struct sha256_state *sctx = shash_desc_ctx(desc);
	__be32 *digest = (__be32 *)out;
	int i;

	for (i = 0; digest_size > 0; i++, digest_size -= sizeof(__be32))
		put_unaligned_be32(sctx->state[i], digest++);

	memzero_explicit(sctx, sizeof(*sctx));
	return 0;
}

#endif /* _CRYPTO_SHA256_BASE_H */