
lib/crypto: sha256: Propagate sha256_block_state type to implementations

The previous commit made the SHA-256 compression function state strongly
typed, but the new struct sha256_block_state type wasn't propagated all the
way down to the implementations of the compression function. Do that now.

Acked-by: Ard Biesheuvel <ardb@kernel.org>
Link: https://lore.kernel.org/r/20250630160645.3198-8-ebiggers@kernel.org
Signed-off-by: Eric Biggers <ebiggers@kernel.org>
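
The struct itself isn't visible in this diff; judging from the accesses in
the generic implementation below (state->h[0] through state->h[7]), it
presumably just wraps the eight 32-bit chaining words that the old
u32 state[SHA256_STATE_WORDS] array held, along the lines of this sketch
(the authoritative definition lives in the sha2 headers):

struct sha256_block_state {
	u32 h[SHA256_STATE_WORDS];	/* the eight 32-bit SHA-256 chaining words */
};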

+53 -39
+1 -1
arch/mips/cavium-octeon/crypto/octeon-sha256.c
···
  * We pass everything as 64-bit. OCTEON can handle misaligned data.
  */
 
-void sha256_blocks_arch(u32 state[SHA256_STATE_WORDS],
+void sha256_blocks_arch(struct sha256_block_state *state,
 			const u8 *data, size_t nblocks)
 {
 	struct octeon_cop2_state cop2_state;
+4 -4
include/crypto/internal/sha2.h
···
 	return false;
 }
 #endif
-void sha256_blocks_generic(u32 state[SHA256_STATE_WORDS],
+void sha256_blocks_generic(struct sha256_block_state *state,
 			   const u8 *data, size_t nblocks);
-void sha256_blocks_arch(u32 state[SHA256_STATE_WORDS],
+void sha256_blocks_arch(struct sha256_block_state *state,
 			const u8 *data, size_t nblocks);
 
 static __always_inline void sha256_choose_blocks(
···
 	bool force_generic, bool force_simd)
 {
 	if (!IS_ENABLED(CONFIG_CRYPTO_ARCH_HAVE_LIB_SHA256) || force_generic)
-		sha256_blocks_generic(state, data, nblocks);
+		sha256_blocks_generic((struct sha256_block_state *)state, data, nblocks);
 	else
-		sha256_blocks_arch(state, data, nblocks);
+		sha256_blocks_arch((struct sha256_block_state *)state, data, nblocks);
 }
 
 static __always_inline void sha256_finup(
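Note the casts in sha256_choose_blocks() above: the old-style hash code
still hands that wrapper the state as a raw u32 array, so the conversion to
the strongly typed pointer happens at this boundary. The cast is only sound
because the struct wraps exactly the same eight words as the array it
replaces; a hypothetical compile-time check (not part of this patch) would
look like:

static_assert(sizeof(struct sha256_block_state) ==
	      SHA256_STATE_WORDS * sizeof(u32));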
+1 -1
lib/crypto/arm/sha256-ce.S
···
 	.word		0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2
 
 /*
- * void sha256_ce_transform(u32 state[SHA256_STATE_WORDS],
+ * void sha256_ce_transform(struct sha256_block_state *state,
  *			    const u8 *data, size_t nblocks);
  */
 ENTRY(sha256_ce_transform)
+4 -4
lib/crypto/arm/sha256.c
···
 #include <linux/kernel.h>
 #include <linux/module.h>
 
-asmlinkage void sha256_block_data_order(u32 state[SHA256_STATE_WORDS],
+asmlinkage void sha256_block_data_order(struct sha256_block_state *state,
 					const u8 *data, size_t nblocks);
-asmlinkage void sha256_block_data_order_neon(u32 state[SHA256_STATE_WORDS],
+asmlinkage void sha256_block_data_order_neon(struct sha256_block_state *state,
 					     const u8 *data, size_t nblocks);
-asmlinkage void sha256_ce_transform(u32 state[SHA256_STATE_WORDS],
+asmlinkage void sha256_ce_transform(struct sha256_block_state *state,
 				    const u8 *data, size_t nblocks);
 
 static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_neon);
 static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_ce);
 
-void sha256_blocks_arch(u32 state[SHA256_STATE_WORDS],
+void sha256_blocks_arch(struct sha256_block_state *state,
 			const u8 *data, size_t nblocks)
 {
 	if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) &&
+1 -1
lib/crypto/arm64/sha256-ce.S
···
 	.word		0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2
 
 /*
- * size_t __sha256_ce_transform(u32 state[SHA256_STATE_WORDS],
+ * size_t __sha256_ce_transform(struct sha256_block_state *state,
  *				const u8 *data, size_t nblocks);
  */
 	.text
+4 -4
lib/crypto/arm64/sha256.c
···
 #include <linux/kernel.h>
 #include <linux/module.h>
 
-asmlinkage void sha256_block_data_order(u32 state[SHA256_STATE_WORDS],
+asmlinkage void sha256_block_data_order(struct sha256_block_state *state,
 					const u8 *data, size_t nblocks);
-asmlinkage void sha256_block_neon(u32 state[SHA256_STATE_WORDS],
+asmlinkage void sha256_block_neon(struct sha256_block_state *state,
 				  const u8 *data, size_t nblocks);
-asmlinkage size_t __sha256_ce_transform(u32 state[SHA256_STATE_WORDS],
+asmlinkage size_t __sha256_ce_transform(struct sha256_block_state *state,
 					const u8 *data, size_t nblocks);
 
 static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_neon);
 static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_ce);
 
-void sha256_blocks_arch(u32 state[SHA256_STATE_WORDS],
+void sha256_blocks_arch(struct sha256_block_state *state,
 			const u8 *data, size_t nblocks)
 {
 	if (IS_ENABLED(CONFIG_KERNEL_MODE_NEON) &&
+3 -2
lib/crypto/powerpc/sha256.c
···
  */
 #define MAX_BYTES	1024
 
-extern void ppc_spe_sha256_transform(u32 *state, const u8 *src, u32 blocks);
+extern void ppc_spe_sha256_transform(struct sha256_block_state *state,
+				     const u8 *src, u32 blocks);
 
 static void spe_begin(void)
 {
···
 	preempt_enable();
 }
 
-void sha256_blocks_arch(u32 state[SHA256_STATE_WORDS],
+void sha256_blocks_arch(struct sha256_block_state *state,
 			const u8 *data, size_t nblocks)
 {
 	do {
+1 -1
lib/crypto/riscv/sha256-riscv64-zvknha_or_zvknhb-zvkb.S
···
 	sha256_4rounds	\last, \k3, W3, W0, W1, W2
 .endm
 
-// void sha256_transform_zvknha_or_zvknhb_zvkb(u32 state[SHA256_STATE_WORDS],
+// void sha256_transform_zvknha_or_zvknhb_zvkb(struct sha256_block_state *state,
 //					       const u8 *data, size_t nblocks);
 SYM_FUNC_START(sha256_transform_zvknha_or_zvknhb_zvkb)
 
+4 -3
lib/crypto/riscv/sha256.c
···
 #include <linux/kernel.h>
 #include <linux/module.h>
 
-asmlinkage void sha256_transform_zvknha_or_zvknhb_zvkb(
-	u32 state[SHA256_STATE_WORDS], const u8 *data, size_t nblocks);
+asmlinkage void
+sha256_transform_zvknha_or_zvknhb_zvkb(struct sha256_block_state *state,
+				       const u8 *data, size_t nblocks);
 
 static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_extensions);
 
-void sha256_blocks_arch(u32 state[SHA256_STATE_WORDS],
+void sha256_blocks_arch(struct sha256_block_state *state,
 			const u8 *data, size_t nblocks)
 {
 	if (static_branch_likely(&have_extensions) && crypto_simd_usable()) {
+1 -1
lib/crypto/s390/sha256.c
···
 
 static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_cpacf_sha256);
 
-void sha256_blocks_arch(u32 state[SHA256_STATE_WORDS],
+void sha256_blocks_arch(struct sha256_block_state *state,
 			const u8 *data, size_t nblocks)
 {
 	if (static_branch_likely(&have_cpacf_sha256))
+18 -6
lib/crypto/sha256-generic.c
···
 		h = t1 + t2;					\
 } while (0)
 
-static void sha256_block_generic(u32 state[SHA256_STATE_WORDS],
+static void sha256_block_generic(struct sha256_block_state *state,
 				 const u8 *input, u32 W[64])
 {
 	u32 a, b, c, d, e, f, g, h;
···
 	}
 
 	/* load the state into our registers */
-	a = state[0]; b = state[1]; c = state[2]; d = state[3];
-	e = state[4]; f = state[5]; g = state[6]; h = state[7];
+	a = state->h[0];
+	b = state->h[1];
+	c = state->h[2];
+	d = state->h[3];
+	e = state->h[4];
+	f = state->h[5];
+	g = state->h[6];
+	h = state->h[7];
 
 	/* now iterate */
 	for (i = 0; i < 64; i += 8) {
···
 		SHA256_ROUND(i + 7, b, c, d, e, f, g, h, a);
 	}
 
-	state[0] += a; state[1] += b; state[2] += c; state[3] += d;
-	state[4] += e; state[5] += f; state[6] += g; state[7] += h;
+	state->h[0] += a;
+	state->h[1] += b;
+	state->h[2] += c;
+	state->h[3] += d;
+	state->h[4] += e;
+	state->h[5] += f;
+	state->h[6] += g;
+	state->h[7] += h;
 }
 
-void sha256_blocks_generic(u32 state[SHA256_STATE_WORDS],
+void sha256_blocks_generic(struct sha256_block_state *state,
 			   const u8 *data, size_t nblocks)
 {
 	u32 W[64];
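The hunk ends just as the per-block driver loop begins. Given the visible
u32 W[64] scratch array and the nblocks count, the loop presumably walks the
input one 64-byte block at a time, roughly like this sketch (reconstructed
for orientation, not part of the diff):

	do {
		sha256_block_generic(state, data, W);
		data += SHA256_BLOCK_SIZE;	/* 64 bytes per block */
	} while (--nblocks);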
+2 -2
lib/crypto/sparc/sha256.c
···
 
 static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_sha256_opcodes);
 
-asmlinkage void sha256_sparc64_transform(u32 state[SHA256_STATE_WORDS],
+asmlinkage void sha256_sparc64_transform(struct sha256_block_state *state,
 					 const u8 *data, size_t nblocks);
 
-void sha256_blocks_arch(u32 state[SHA256_STATE_WORDS],
+void sha256_blocks_arch(struct sha256_block_state *state,
 			const u8 *data, size_t nblocks)
 {
 	if (static_branch_likely(&have_sha256_opcodes))
+1 -1
lib/crypto/x86/sha256-avx-asm.S
···
 .endm
 
 ########################################################################
-## void sha256_transform_avx(u32 state[SHA256_STATE_WORDS],
+## void sha256_transform_avx(struct sha256_block_state *state,
 ##			     const u8 *data, size_t nblocks);
 ########################################################################
 .text
+1 -1
lib/crypto/x86/sha256-avx2-asm.S
···
 .endm
 
 ########################################################################
-## void sha256_transform_rorx(u32 state[SHA256_STATE_WORDS],
+## void sha256_transform_rorx(struct sha256_block_state *state,
 ##			      const u8 *data, size_t nblocks);
 ########################################################################
 .text
+1 -1
lib/crypto/x86/sha256-ni-asm.S
···
  * only processes complete blocks. State initialization, buffering of partial
  * blocks, and digest finalization is expected to be handled elsewhere.
  *
- * void sha256_ni_transform(u32 state[SHA256_STATE_WORDS],
+ * void sha256_ni_transform(struct sha256_block_state *state,
  *			    const u8 *data, size_t nblocks);
  */
 .text
+1 -1
lib/crypto/x86/sha256-ssse3-asm.S
···
 .endm
 
 ########################################################################
-## void sha256_transform_ssse3(u32 state[SHA256_STATE_WORDS],
+## void sha256_transform_ssse3(struct sha256_block_state *state,
 ##			       const u8 *data, size_t nblocks);
 ########################################################################
 .text
+5 -5
lib/crypto/x86/sha256.c
···
 #include <linux/module.h>
 #include <linux/static_call.h>
 
-asmlinkage void sha256_transform_ssse3(u32 state[SHA256_STATE_WORDS],
+asmlinkage void sha256_transform_ssse3(struct sha256_block_state *state,
 				       const u8 *data, size_t nblocks);
-asmlinkage void sha256_transform_avx(u32 state[SHA256_STATE_WORDS],
+asmlinkage void sha256_transform_avx(struct sha256_block_state *state,
 				     const u8 *data, size_t nblocks);
-asmlinkage void sha256_transform_rorx(u32 state[SHA256_STATE_WORDS],
+asmlinkage void sha256_transform_rorx(struct sha256_block_state *state,
 				      const u8 *data, size_t nblocks);
-asmlinkage void sha256_ni_transform(u32 state[SHA256_STATE_WORDS],
+asmlinkage void sha256_ni_transform(struct sha256_block_state *state,
 				    const u8 *data, size_t nblocks);
 
 static __ro_after_init DEFINE_STATIC_KEY_FALSE(have_sha256_x86);
 
 DEFINE_STATIC_CALL(sha256_blocks_x86, sha256_transform_ssse3);
 
-void sha256_blocks_arch(u32 state[SHA256_STATE_WORDS],
+void sha256_blocks_arch(struct sha256_block_state *state,
 			const u8 *data, size_t nblocks)
 {
 	if (static_branch_likely(&have_sha256_x86) && crypto_simd_usable()) {
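The x86 glue dispatches through a static call whose default target is the
SSSE3 routine; this patch only changes the prototypes. The retargeting
presumably happens at init time once CPU features are known, along these
lines (a hypothetical sketch using the existing static_call_update() API;
the feature checks shown are assumptions, and the real selection logic,
e.g. AVX handling, is more involved):

static int __init sha256_x86_mod_init(void)
{
	/* Prefer SHA-NI, then AVX2/rorx; fall back to the SSSE3 default. */
	if (boot_cpu_has(X86_FEATURE_SHA_NI))
		static_call_update(sha256_blocks_x86, sha256_ni_transform);
	else if (boot_cpu_has(X86_FEATURE_AVX2))
		static_call_update(sha256_blocks_x86, sha256_transform_rorx);
	else if (!boot_cpu_has(X86_FEATURE_SSSE3))
		return 0;	/* no usable extension; keep using generic code */
	static_branch_enable(&have_sha256_x86);
	return 0;
}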