Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: lib/sha256 - Move partial block handling out

Extract the common partial block handling into a helper macro
that can be reused by other library code.

Also delete the unused sha256_base_do_finalize function.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

Herbert Xu 74a43a2c fba4aafa

+62 -37
+52
include/crypto/internal/blockhash.h
/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * Handle partial blocks for block hash.
 *
 * Copyright (c) 2015 Linaro Ltd <ard.biesheuvel@linaro.org>
 * Copyright (c) 2025 Herbert Xu <herbert@gondor.apana.org.au>
 */

#ifndef _CRYPTO_INTERNAL_BLOCKHASH_H
#define _CRYPTO_INTERNAL_BLOCKHASH_H

#include <linux/string.h>
#include <linux/types.h>

/*
 * BLOCK_HASH_UPDATE_BASE - partial-block buffering for a block hash.
 *
 * @block_fn: compression function, called as block_fn(state, data, blocks)
 * @state:    opaque hash state passed through to @block_fn
 * @src:      input bytes
 * @nbytes:   number of input bytes
 * @bs:       block size in bytes
 * @dv:       divisor converting a byte count into @block_fn's block count
 *            (1 when block_fn takes bytes, @bs when it takes blocks)
 * @buf:      bs-sized carry buffer holding a partial block between calls
 * @buflen:   number of bytes currently buffered in @buf
 *
 * Feeds as many whole blocks as possible to @block_fn, draining the carry
 * buffer first, then stashes the remaining tail (< @bs bytes) in @buf.
 * As a GNU statement expression it evaluates to the new buffer length.
 *
 * Every argument is captured exactly once up front so that callers may
 * pass expressions with side effects; the loop body must therefore only
 * use the _-prefixed copies, never the raw parameters.
 */
#define BLOCK_HASH_UPDATE_BASE(block_fn, state, src, nbytes, bs, dv,	\
			       buf, buflen)				\
({									\
	typeof(block_fn) *_block_fn = &(block_fn);			\
	typeof(state + 0) _state = (state);				\
	unsigned int _buflen = (buflen);				\
	size_t _nbytes = (nbytes);					\
	unsigned int _bs = (bs);					\
	unsigned int _dv = (dv);					\
	const u8 *_src = (src);						\
	u8 *_buf = (buf);						\
	while ((_buflen + _nbytes) >= _bs) {				\
		const u8 *data = _src;					\
		size_t len = _nbytes;					\
		size_t blocks;						\
		int remain;						\
		if (_buflen) {						\
			/* Top up the carry buffer to one full block. */ \
			remain = _bs - _buflen;				\
			memcpy(_buf + _buflen, _src, remain);		\
			data = _buf;					\
			len = _bs;					\
		}							\
		/* Use the captured _bs/_dv: the raw parameters must not \
		 * be re-evaluated inside the loop.			\
		 */							\
		remain = len % _bs;					\
		blocks = (len - remain) / _dv;				\
		(*_block_fn)(_state, data, blocks);			\
		_src += len - remain - _buflen;				\
		_nbytes -= len - remain - _buflen;			\
		_buflen = 0;						\
	}								\
	/* Stash the sub-block tail for the next update call. */	\
	memcpy(_buf + _buflen, _src, _nbytes);				\
	_buflen += _nbytes;						\
})

/* block_fn counts bytes (dv == 1). */
#define BLOCK_HASH_UPDATE(block, state, src, nbytes, bs, buf, buflen) \
	BLOCK_HASH_UPDATE_BASE(block, state, src, nbytes, bs, 1, buf, buflen)
/* block_fn counts whole blocks (dv == bs). */
#define BLOCK_HASH_UPDATE_BLOCKS(block, state, src, nbytes, bs, buf, buflen) \
	BLOCK_HASH_UPDATE_BASE(block, state, src, nbytes, bs, bs, buf, buflen)

#endif /* _CRYPTO_INTERNAL_BLOCKHASH_H */
+7 -2
include/crypto/sha2.h
··· 71 71 }; 72 72 73 73 struct sha256_state { 74 - u32 state[SHA256_DIGEST_SIZE / 4]; 75 - u64 count; 74 + union { 75 + struct crypto_sha256_state ctx; 76 + struct { 77 + u32 state[SHA256_DIGEST_SIZE / 4]; 78 + u64 count; 79 + }; 80 + }; 76 81 u8 buf[SHA256_BLOCK_SIZE]; 77 82 }; 78 83
+3 -35
include/crypto/sha256_base.h
··· 8 8 #ifndef _CRYPTO_SHA256_BASE_H 9 9 #define _CRYPTO_SHA256_BASE_H 10 10 11 + #include <crypto/internal/blockhash.h> 11 12 #include <crypto/internal/hash.h> 12 13 #include <crypto/sha2.h> 13 14 #include <linux/math.h> ··· 41 40 sha256_block_fn *block_fn) 42 41 { 43 42 unsigned int partial = sctx->count % SHA256_BLOCK_SIZE; 44 - struct crypto_sha256_state *state = (void *)sctx; 45 43 46 44 sctx->count += len; 47 - 48 - if (unlikely((partial + len) >= SHA256_BLOCK_SIZE)) { 49 - int blocks; 50 - 51 - if (partial) { 52 - int p = SHA256_BLOCK_SIZE - partial; 53 - 54 - memcpy(sctx->buf + partial, data, p); 55 - data += p; 56 - len -= p; 57 - 58 - block_fn(state, sctx->buf, 1); 59 - } 60 - 61 - blocks = len / SHA256_BLOCK_SIZE; 62 - len %= SHA256_BLOCK_SIZE; 63 - 64 - if (blocks) { 65 - block_fn(state, data, blocks); 66 - data += blocks * SHA256_BLOCK_SIZE; 67 - } 68 - partial = 0; 69 - } 70 - if (len) 71 - memcpy(sctx->buf + partial, data, len); 72 - 45 + BLOCK_HASH_UPDATE_BLOCKS(block_fn, &sctx->ctx, data, len, 46 + SHA256_BLOCK_SIZE, sctx->buf, partial); 73 47 return 0; 74 48 } 75 49 ··· 114 138 115 139 sctx->count -= partial; 116 140 return lib_sha256_base_do_finup(state, sctx->buf, partial, block_fn); 117 - } 118 - 119 - static inline int sha256_base_do_finalize(struct shash_desc *desc, 120 - sha256_block_fn *block_fn) 121 - { 122 - struct sha256_state *sctx = shash_desc_ctx(desc); 123 - 124 - return lib_sha256_base_do_finalize(sctx, block_fn); 125 141 } 126 142 127 143 static inline int __sha256_base_finish(u32 state[SHA256_DIGEST_SIZE / 4],