Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

lib/crypto: sha256: Make library API use strongly-typed contexts

Currently the SHA-224 and SHA-256 library functions can be mixed
arbitrarily, even in ways that are incorrect, for example using
sha224_init() and sha256_final(). This is because they operate on the
same structure, sha256_state.

Introduce stronger typing, as I did for SHA-384 and SHA-512.

Also as I did for SHA-384 and SHA-512, use the names *_ctx instead of
*_state. The *_ctx names have the following small benefits:

- They're shorter.
- They avoid an ambiguity with the compression function state.
- They're consistent with the well-known OpenSSL API.
- Users usually name the variable 'sctx' anyway, which suggests that
*_ctx would be the more natural name for the actual struct.

Therefore: update the SHA-224 and SHA-256 APIs, implementation, and
calling code accordingly.

In the new structs, also strongly-type the compression function state.

Acked-by: Ard Biesheuvel <ardb@kernel.org>
Link: https://lore.kernel.org/r/20250630160645.3198-7-ebiggers@kernel.org
Signed-off-by: Eric Biggers <ebiggers@kernel.org>

+144 -68
+4 -4
arch/riscv/purgatory/purgatory.c
··· 20 20 static int verify_sha256_digest(void) 21 21 { 22 22 struct kexec_sha_region *ptr, *end; 23 - struct sha256_state ss; 23 + struct sha256_ctx sctx; 24 24 u8 digest[SHA256_DIGEST_SIZE]; 25 25 26 - sha256_init(&ss); 26 + sha256_init(&sctx); 27 27 end = purgatory_sha_regions + ARRAY_SIZE(purgatory_sha_regions); 28 28 for (ptr = purgatory_sha_regions; ptr < end; ptr++) 29 - sha256_update(&ss, (uint8_t *)(ptr->start), ptr->len); 30 - sha256_final(&ss, digest); 29 + sha256_update(&sctx, (uint8_t *)(ptr->start), ptr->len); 30 + sha256_final(&sctx, digest); 31 31 if (memcmp(digest, purgatory_sha256_digest, sizeof(digest)) != 0) 32 32 return 1; 33 33 return 0;
+1 -1
arch/s390/purgatory/purgatory.c
··· 16 16 { 17 17 struct kexec_sha_region *ptr, *end; 18 18 u8 digest[SHA256_DIGEST_SIZE]; 19 - struct sha256_state sctx; 19 + struct sha256_ctx sctx; 20 20 21 21 sha256_init(&sctx); 22 22 end = purgatory_sha_regions + ARRAY_SIZE(purgatory_sha_regions);
+1 -1
arch/x86/purgatory/purgatory.c
··· 25 25 { 26 26 struct kexec_sha_region *ptr, *end; 27 27 u8 digest[SHA256_DIGEST_SIZE]; 28 - struct sha256_state sctx; 28 + struct sha256_ctx sctx; 29 29 30 30 sha256_init(&sctx); 31 31 end = purgatory_sha_regions + ARRAY_SIZE(purgatory_sha_regions);
+8 -8
crypto/sha256.c
··· 137 137 138 138 static int crypto_sha256_import_lib(struct shash_desc *desc, const void *in) 139 139 { 140 - struct sha256_state *sctx = shash_desc_ctx(desc); 140 + struct __sha256_ctx *sctx = shash_desc_ctx(desc); 141 141 const u8 *p = in; 142 142 143 143 memcpy(sctx, p, sizeof(*sctx)); 144 144 p += sizeof(*sctx); 145 - sctx->count += *p; 145 + sctx->bytecount += *p; 146 146 return 0; 147 147 } 148 148 149 149 static int crypto_sha256_export_lib(struct shash_desc *desc, void *out) 150 150 { 151 - struct sha256_state *sctx0 = shash_desc_ctx(desc); 152 - struct sha256_state sctx = *sctx0; 151 + struct __sha256_ctx *sctx0 = shash_desc_ctx(desc); 152 + struct __sha256_ctx sctx = *sctx0; 153 153 unsigned int partial; 154 154 u8 *p = out; 155 155 156 - partial = sctx.count % SHA256_BLOCK_SIZE; 157 - sctx.count -= partial; 156 + partial = sctx.bytecount % SHA256_BLOCK_SIZE; 157 + sctx.bytecount -= partial; 158 158 memcpy(p, &sctx, sizeof(sctx)); 159 159 p += sizeof(sctx); 160 160 *p = partial; ··· 201 201 .update = crypto_sha256_update_lib, 202 202 .final = crypto_sha256_final_lib, 203 203 .digest = crypto_sha256_digest_lib, 204 - .descsize = sizeof(struct sha256_state), 204 + .descsize = sizeof(struct sha256_ctx), 205 205 .statesize = sizeof(struct crypto_sha256_state) + 206 206 SHA256_BLOCK_SIZE + 1, 207 207 .import = crypto_sha256_import_lib, ··· 216 216 .init = crypto_sha224_init, 217 217 .update = crypto_sha256_update_lib, 218 218 .final = crypto_sha224_final_lib, 219 - .descsize = sizeof(struct sha256_state), 219 + .descsize = sizeof(struct sha224_ctx), 220 220 .statesize = sizeof(struct crypto_sha256_state) + 221 221 SHA256_BLOCK_SIZE + 1, 222 222 .import = crypto_sha256_import_lib,
+6 -6
drivers/char/tpm/tpm2-sessions.c
··· 390 390 * on every operation, so we weld the hmac init and final functions in 391 391 * here to give it the same usage characteristics as a regular hash 392 392 */ 393 - static void tpm2_hmac_init(struct sha256_state *sctx, u8 *key, u32 key_len) 393 + static void tpm2_hmac_init(struct sha256_ctx *sctx, u8 *key, u32 key_len) 394 394 { 395 395 u8 pad[SHA256_BLOCK_SIZE]; 396 396 int i; ··· 406 406 sha256_update(sctx, pad, sizeof(pad)); 407 407 } 408 408 409 - static void tpm2_hmac_final(struct sha256_state *sctx, u8 *key, u32 key_len, 409 + static void tpm2_hmac_final(struct sha256_ctx *sctx, u8 *key, u32 key_len, 410 410 u8 *out) 411 411 { 412 412 u8 pad[SHA256_BLOCK_SIZE]; ··· 440 440 const __be32 bits = cpu_to_be32(bytes * 8); 441 441 442 442 while (bytes > 0) { 443 - struct sha256_state sctx; 443 + struct sha256_ctx sctx; 444 444 __be32 c = cpu_to_be32(counter); 445 445 446 446 tpm2_hmac_init(&sctx, key, key_len); ··· 467 467 static void tpm2_KDFe(u8 z[EC_PT_SZ], const char *str, u8 *pt_u, u8 *pt_v, 468 468 u8 *out) 469 469 { 470 - struct sha256_state sctx; 470 + struct sha256_ctx sctx; 471 471 /* 472 472 * this should be an iterative counter, but because we know 473 473 * we're only taking 32 bytes for the point using a sha256 ··· 592 592 u8 *hmac = NULL; 593 593 u32 attrs; 594 594 u8 cphash[SHA256_DIGEST_SIZE]; 595 - struct sha256_state sctx; 595 + struct sha256_ctx sctx; 596 596 597 597 if (!auth) 598 598 return; ··· 750 750 off_t offset_s, offset_p; 751 751 u8 rphash[SHA256_DIGEST_SIZE]; 752 752 u32 attrs, cc; 753 - struct sha256_state sctx; 753 + struct sha256_ctx sctx; 754 754 u16 tag = be16_to_cpu(head->tag); 755 755 int parm_len, len, i, handles; 756 756
+41 -11
include/crypto/sha2.h
··· 114 114 u8 buf[SHA512_BLOCK_SIZE]; 115 115 }; 116 116 117 - void sha256_update(struct sha256_state *sctx, const u8 *data, size_t len); 117 + /* State for the SHA-256 (and SHA-224) compression function */ 118 + struct sha256_block_state { 119 + u32 h[SHA256_STATE_WORDS]; 120 + }; 118 121 119 - static inline void sha224_init(struct sha256_state *sctx) 120 - { 121 - sha224_block_init(&sctx->ctx); 122 - } 123 - static inline void sha224_update(struct sha256_state *sctx, 122 + /* 123 + * Context structure, shared by SHA-224 and SHA-256. The sha224_ctx and 124 + * sha256_ctx structs wrap this one so that the API has proper typing and 125 + * doesn't allow mixing the SHA-224 and SHA-256 functions arbitrarily. 126 + */ 127 + struct __sha256_ctx { 128 + struct sha256_block_state state; 129 + u64 bytecount; 130 + u8 buf[SHA256_BLOCK_SIZE] __aligned(__alignof__(__be64)); 131 + }; 132 + void __sha256_update(struct __sha256_ctx *ctx, const u8 *data, size_t len); 133 + 134 + /** 135 + * struct sha224_ctx - Context for hashing a message with SHA-224 136 + * @ctx: private 137 + */ 138 + struct sha224_ctx { 139 + struct __sha256_ctx ctx; 140 + }; 141 + 142 + void sha224_init(struct sha224_ctx *ctx); 143 + static inline void sha224_update(struct sha224_ctx *ctx, 124 144 const u8 *data, size_t len) 125 145 { 126 - sha256_update(sctx, data, len); 146 + __sha256_update(&ctx->ctx, data, len); 127 147 } 128 148 - void sha224_final(struct sha256_state *sctx, u8 out[SHA224_DIGEST_SIZE]); 148 + void sha224_final(struct sha224_ctx *ctx, u8 out[SHA224_DIGEST_SIZE]); 129 149 void sha224(const u8 *data, size_t len, u8 out[SHA224_DIGEST_SIZE]); 130 150 131 - static inline void sha256_init(struct sha256_state *sctx) 151 + /** 152 + * struct sha256_ctx - Context for hashing a message with SHA-256 153 + * @ctx: private 154 + */ 155 + struct sha256_ctx { 156 + struct __sha256_ctx ctx; 157 + }; 158 + 159 + void sha256_init(struct sha256_ctx *ctx); 160 + static inline void sha256_update(struct sha256_ctx *ctx, 161 + const u8 *data, size_t len) 132 162 { 133 - sha256_block_init(&sctx->ctx); 163 + __sha256_update(&ctx->ctx, data, len); 134 164 } 135 - void sha256_final(struct sha256_state *sctx, u8 out[SHA256_DIGEST_SIZE]); 165 + void sha256_final(struct sha256_ctx *ctx, u8 out[SHA256_DIGEST_SIZE]); 136 166 void sha256(const u8 *data, size_t len, u8 out[SHA256_DIGEST_SIZE]); 137 167 138 168 /* State for the SHA-512 (and SHA-384) compression function */
+5 -5
kernel/kexec_file.c
··· 751 751 /* Calculate and store the digest of segments */ 752 752 static int kexec_calculate_store_digests(struct kimage *image) 753 753 { 754 - struct sha256_state state; 754 + struct sha256_ctx sctx; 755 755 int ret = 0, i, j, zero_buf_sz, sha_region_sz; 756 756 size_t nullsz; 757 757 u8 digest[SHA256_DIGEST_SIZE]; ··· 770 770 if (!sha_regions) 771 771 return -ENOMEM; 772 772 773 - sha256_init(&state); 773 + sha256_init(&sctx); 774 774 775 775 for (j = i = 0; i < image->nr_segments; i++) { 776 776 struct kexec_segment *ksegment; ··· 796 796 if (check_ima_segment_index(image, i)) 797 797 continue; 798 798 799 - sha256_update(&state, ksegment->kbuf, ksegment->bufsz); 799 + sha256_update(&sctx, ksegment->kbuf, ksegment->bufsz); 800 800 801 801 /* 802 802 * Assume rest of the buffer is filled with zero and ··· 808 808 809 809 if (bytes > zero_buf_sz) 810 810 bytes = zero_buf_sz; 811 - sha256_update(&state, zero_buf, bytes); 811 + sha256_update(&sctx, zero_buf, bytes); 812 812 nullsz -= bytes; 813 813 } 814 814 ··· 817 817 j++; 818 818 } 819 819 820 - sha256_final(&state, digest); 820 + sha256_final(&sctx, digest); 821 821 822 822 ret = kexec_purgatory_get_set_symbol(image, "purgatory_sha_regions", 823 823 sha_regions, sha_region_sz, 0);
+78 -32
lib/crypto/sha256.c
··· 18 18 #include <linux/module.h> 19 19 #include <linux/string.h> 20 20 21 + static const struct sha256_block_state sha224_iv = { 22 + .h = { 23 + SHA224_H0, SHA224_H1, SHA224_H2, SHA224_H3, 24 + SHA224_H4, SHA224_H5, SHA224_H6, SHA224_H7, 25 + }, 26 + }; 27 + 28 + static const struct sha256_block_state sha256_iv = { 29 + .h = { 30 + SHA256_H0, SHA256_H1, SHA256_H2, SHA256_H3, 31 + SHA256_H4, SHA256_H5, SHA256_H6, SHA256_H7, 32 + }, 33 + }; 34 + 21 35 /* 22 36 * If __DISABLE_EXPORTS is defined, then this file is being compiled for a 23 37 * pre-boot environment. In that case, ignore the kconfig options, pull the ··· 46 32 return __is_defined(__DISABLE_EXPORTS); 47 33 } 48 34 49 - static inline void sha256_blocks(u32 state[SHA256_STATE_WORDS], const u8 *data, 50 - size_t nblocks) 35 + static inline void sha256_blocks(struct sha256_block_state *state, 36 + const u8 *data, size_t nblocks) 51 37 { 52 - sha256_choose_blocks(state, data, nblocks, sha256_purgatory(), false); 38 + sha256_choose_blocks(state->h, data, nblocks, sha256_purgatory(), false); 53 39 } 54 40 55 - void sha256_update(struct sha256_state *sctx, const u8 *data, size_t len) 41 + static void __sha256_init(struct __sha256_ctx *ctx, 42 + const struct sha256_block_state *iv, 43 + u64 initial_bytecount) 56 44 { 57 - size_t partial = sctx->count % SHA256_BLOCK_SIZE; 58 - 59 - sctx->count += len; 60 - BLOCK_HASH_UPDATE_BLOCKS(sha256_blocks, sctx->ctx.state, data, len, 61 - SHA256_BLOCK_SIZE, sctx->buf, partial); 62 - } 63 - EXPORT_SYMBOL(sha256_update); 64 - 65 - static inline void __sha256_final(struct sha256_state *sctx, u8 *out, 66 - size_t digest_size) 67 - { 68 - size_t partial = sctx->count % SHA256_BLOCK_SIZE; 69 - 70 - sha256_finup(&sctx->ctx, sctx->buf, partial, out, digest_size, 71 - sha256_purgatory(), false); 72 - memzero_explicit(sctx, sizeof(*sctx)); 45 + ctx->state = *iv; 46 + ctx->bytecount = initial_bytecount; 73 47 } 74 48 75 - void sha224_final(struct sha256_state *sctx, u8 out[SHA224_DIGEST_SIZE]) 49 + void sha224_init(struct sha224_ctx *ctx) 76 50 { 77 - __sha256_final(sctx, out, SHA224_DIGEST_SIZE); 51 + __sha256_init(&ctx->ctx, &sha224_iv, 0); 52 + } 53 + EXPORT_SYMBOL_GPL(sha224_init); 54 + 55 + void sha256_init(struct sha256_ctx *ctx) 56 + { 57 + __sha256_init(&ctx->ctx, &sha256_iv, 0); 58 + } 59 + EXPORT_SYMBOL_GPL(sha256_init); 60 + 61 + void __sha256_update(struct __sha256_ctx *ctx, const u8 *data, size_t len) 62 + { 63 + size_t partial = ctx->bytecount % SHA256_BLOCK_SIZE; 64 + 65 + ctx->bytecount += len; 66 + BLOCK_HASH_UPDATE_BLOCKS(sha256_blocks, &ctx->state, data, len, 67 + SHA256_BLOCK_SIZE, ctx->buf, partial); 68 + } 69 + EXPORT_SYMBOL(__sha256_update); 70 + 71 + static void __sha256_final(struct __sha256_ctx *ctx, 72 + u8 *out, size_t digest_size) 73 + { 74 + u64 bitcount = ctx->bytecount << 3; 75 + size_t partial = ctx->bytecount % SHA256_BLOCK_SIZE; 76 + 77 + ctx->buf[partial++] = 0x80; 78 + if (partial > SHA256_BLOCK_SIZE - 8) { 79 + memset(&ctx->buf[partial], 0, SHA256_BLOCK_SIZE - partial); 80 + sha256_blocks(&ctx->state, ctx->buf, 1); 81 + partial = 0; 82 + } 83 + memset(&ctx->buf[partial], 0, SHA256_BLOCK_SIZE - 8 - partial); 84 + *(__be64 *)&ctx->buf[SHA256_BLOCK_SIZE - 8] = cpu_to_be64(bitcount); 85 + sha256_blocks(&ctx->state, ctx->buf, 1); 86 + 87 + for (size_t i = 0; i < digest_size; i += 4) 88 + put_unaligned_be32(ctx->state.h[i / 4], out + i); 89 + } 90 + 91 + void sha224_final(struct sha224_ctx *ctx, u8 out[SHA224_DIGEST_SIZE]) 92 + { 93 + __sha256_final(&ctx->ctx, out, SHA224_DIGEST_SIZE); 94 + memzero_explicit(ctx, sizeof(*ctx)); 78 95 } 79 96 EXPORT_SYMBOL(sha224_final); 80 97 81 - void sha256_final(struct sha256_state *sctx, u8 out[SHA256_DIGEST_SIZE]) 98 + void sha256_final(struct sha256_ctx *ctx, u8 out[SHA256_DIGEST_SIZE]) 82 99 { 83 - __sha256_final(sctx, out, SHA256_DIGEST_SIZE); 100 + __sha256_final(&ctx->ctx, out, SHA256_DIGEST_SIZE); 101 + memzero_explicit(ctx, sizeof(*ctx)); 84 102 } 85 103 EXPORT_SYMBOL(sha256_final); 86 104 87 105 void sha224(const u8 *data, size_t len, u8 out[SHA224_DIGEST_SIZE]) 88 106 { 89 - struct sha256_state sctx; 107 + struct sha224_ctx ctx; 90 108 91 - sha224_init(&sctx); 92 - sha224_update(&sctx, data, len); 93 - sha224_final(&sctx, out); 109 + sha224_init(&ctx); 110 + sha224_update(&ctx, data, len); 111 + sha224_final(&ctx, out); 94 112 } 95 113 EXPORT_SYMBOL(sha224); 96 114 97 115 void sha256(const u8 *data, size_t len, u8 out[SHA256_DIGEST_SIZE]) 98 116 { 99 - struct sha256_state sctx; 117 + struct sha256_ctx ctx; 100 118 101 - sha256_init(&sctx); 102 - sha256_update(&sctx, data, len); 103 - sha256_final(&sctx, out); 119 + sha256_init(&ctx); 120 + sha256_update(&ctx, data, len); 121 + sha256_final(&ctx, out); 104 122 } 105 123 EXPORT_SYMBOL(sha256); 106 124