Linux kernel mirror (for testing): git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git

crypto: sha256 - implement base layer for SHA-256

To reduce the number of copies of boilerplate code throughout
the tree, this patch implements generic glue for the SHA-256
algorithm. This allows a specific arch or hardware implementation
to only implement the special handling that it needs.

The users need to supply an implementation of

void (sha256_block_fn)(struct sha256_state *sst, u8 const *src, int blocks)

and pass it to the SHA-256 base functions. For easy casting between the
prototype above and existing block functions that take a 'u32 state[]'
as their first argument, the 'state' member of struct sha256_state is
moved to the base of the struct.
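
For illustration only, a minimal sketch of what this looks like from the driver
side; the names sha256_neon_transform and sha256_neon_update are hypothetical
stand-ins for whatever the arch provides and are not part of this patch:

/* Arch-supplied compression function (hypothetical name); consumes 'blocks'
 * 64-byte blocks from 'src' into sst->state. Typically a thin wrapper around
 * an assembly routine defined elsewhere. */
void sha256_neon_transform(struct sha256_state *sst, u8 const *src, int blocks);

static int sha256_neon_update(struct shash_desc *desc, const u8 *data,
                              unsigned int len)
{
        /* All buffering and length bookkeeping is done by the base layer. */
        return sha256_base_do_update(desc, data, len, sha256_neon_transform);
}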

Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

Authored by: Ard Biesheuvel
Committed by: Herbert Xu
11b8d5ef c4d5b9ff

 include/crypto/sha.h         |   2 +-
 include/crypto/sha256_base.h | 128 ++++++++++++++++++++++++++++++++++++++++++
 2 files changed, 129 insertions(+), 1 deletion(-)
include/crypto/sha.h
@@ -71,7 +71,7 @@
 };
 
 struct sha256_state {
-	u64 count;
 	u32 state[SHA256_DIGEST_SIZE / 4];
+	u64 count;
 	u8 buf[SHA256_BLOCK_SIZE];
 };
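
With 'state' now at offset zero, an existing block function that takes a
'u32 state[]' as its first argument can be handed to the base layer through a
cast. A hedged sketch, assuming a pre-existing assembly routine named
sha256_block_data_order; the name and exact prototype are illustrative, not
part of this patch:

/* Pre-existing block function with a 'u32 state[]' first argument. */
asmlinkage void sha256_block_data_order(u32 *state, const u8 *src, int blocks);

static int sha256_asm_update(struct shash_desc *desc, const u8 *data,
                             unsigned int len)
{
        /* The struct layout makes this cast line up with sha256_block_fn. */
        return sha256_base_do_update(desc, data, len,
                                     (sha256_block_fn *)sha256_block_data_order);
}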
include/crypto/sha256_base.h (new file, +128 lines)
/*
 * sha256_base.h - core logic for SHA-256 implementations
 *
 * Copyright (C) 2015 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <crypto/internal/hash.h>
#include <crypto/sha.h>
#include <linux/crypto.h>
#include <linux/module.h>

#include <asm/unaligned.h>

typedef void (sha256_block_fn)(struct sha256_state *sst, u8 const *src,
                               int blocks);

static inline int sha224_base_init(struct shash_desc *desc)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);

        sctx->state[0] = SHA224_H0;
        sctx->state[1] = SHA224_H1;
        sctx->state[2] = SHA224_H2;
        sctx->state[3] = SHA224_H3;
        sctx->state[4] = SHA224_H4;
        sctx->state[5] = SHA224_H5;
        sctx->state[6] = SHA224_H6;
        sctx->state[7] = SHA224_H7;
        sctx->count = 0;

        return 0;
}

static inline int sha256_base_init(struct shash_desc *desc)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);

        sctx->state[0] = SHA256_H0;
        sctx->state[1] = SHA256_H1;
        sctx->state[2] = SHA256_H2;
        sctx->state[3] = SHA256_H3;
        sctx->state[4] = SHA256_H4;
        sctx->state[5] = SHA256_H5;
        sctx->state[6] = SHA256_H6;
        sctx->state[7] = SHA256_H7;
        sctx->count = 0;

        return 0;
}

static inline int sha256_base_do_update(struct shash_desc *desc,
                                        const u8 *data,
                                        unsigned int len,
                                        sha256_block_fn *block_fn)
{
        struct sha256_state *sctx = shash_desc_ctx(desc);
        unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;

        sctx->count += len;

        if (unlikely((partial + len) >= SHA256_BLOCK_SIZE)) {
                int blocks;

                if (partial) {
                        int p = SHA256_BLOCK_SIZE - partial;

                        memcpy(sctx->buf + partial, data, p);
                        data += p;
                        len -= p;

                        block_fn(sctx, sctx->buf, 1);
                }

                blocks = len / SHA256_BLOCK_SIZE;
                len %= SHA256_BLOCK_SIZE;

                if (blocks) {
                        block_fn(sctx, data, blocks);
                        data += blocks * SHA256_BLOCK_SIZE;
                }
                partial = 0;
        }
        if (len)
                memcpy(sctx->buf + partial, data, len);

        return 0;
}

static inline int sha256_base_do_finalize(struct shash_desc *desc,
                                          sha256_block_fn *block_fn)
{
        const int bit_offset = SHA256_BLOCK_SIZE - sizeof(__be64);
        struct sha256_state *sctx = shash_desc_ctx(desc);
        __be64 *bits = (__be64 *)(sctx->buf + bit_offset);
        unsigned int partial = sctx->count % SHA256_BLOCK_SIZE;

        sctx->buf[partial++] = 0x80;
        if (partial > bit_offset) {
                memset(sctx->buf + partial, 0x0, SHA256_BLOCK_SIZE - partial);
                partial = 0;

                block_fn(sctx, sctx->buf, 1);
        }

        memset(sctx->buf + partial, 0x0, bit_offset - partial);
        *bits = cpu_to_be64(sctx->count << 3);
        block_fn(sctx, sctx->buf, 1);

        return 0;
}

static inline int sha256_base_finish(struct shash_desc *desc, u8 *out)
{
        unsigned int digest_size = crypto_shash_digestsize(desc->tfm);
        struct sha256_state *sctx = shash_desc_ctx(desc);
        __be32 *digest = (__be32 *)out;
        int i;

        for (i = 0; digest_size > 0; i++, digest_size -= sizeof(__be32))
                put_unaligned_be32(sctx->state[i], digest++);

        *sctx = (struct sha256_state){};
        return 0;
}
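
To round out the picture, a hedged sketch of how the remaining shash callbacks
of a driver might sit on top of these helpers, continuing the hypothetical
sha256_neon_* example above; the finup/final wrappers and the shash_alg wiring
are assumptions for illustration, not code from this patch:

static int sha256_neon_finup(struct shash_desc *desc, const u8 *data,
                             unsigned int len, u8 *out)
{
        if (len)
                sha256_base_do_update(desc, data, len, sha256_neon_transform);
        sha256_base_do_finalize(desc, sha256_neon_transform);

        /* Emits a SHA-224 or SHA-256 digest depending on the tfm's digest
         * size, then wipes the state. */
        return sha256_base_finish(desc, out);
}

static int sha256_neon_final(struct shash_desc *desc, u8 *out)
{
        return sha256_neon_finup(desc, NULL, 0, out);
}

static struct shash_alg sha256_neon_alg = {
        .digestsize     = SHA256_DIGEST_SIZE,
        .init           = sha256_base_init,
        .update         = sha256_neon_update,
        .final          = sha256_neon_final,
        .finup          = sha256_neon_finup,
        .descsize       = sizeof(struct sha256_state),
        .base           = {
                .cra_name               = "sha256",
                .cra_driver_name        = "sha256-neon",
                .cra_blocksize          = SHA256_BLOCK_SIZE,
                .cra_module             = THIS_MODULE,
        },
};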