Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: arm/ghash - Use API partial block handling

Use the Crypto API partial block handling.

Also switch to the generic export format.

Finally remove a couple of stray may_use_simd() calls in gcm.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

Herbert Xu 08bcb691 f5bd7e84

+50 -60
arch/arm/crypto/ghash-ce-glue.c
··· 8 8 9 9 #include <asm/hwcap.h> 10 10 #include <asm/neon.h> 11 - #include <asm/simd.h> 12 - #include <linux/unaligned.h> 13 11 #include <crypto/aes.h> 14 - #include <crypto/gcm.h> 15 12 #include <crypto/b128ops.h> 16 - #include <crypto/cryptd.h> 13 + #include <crypto/gcm.h> 14 + #include <crypto/gf128mul.h> 15 + #include <crypto/ghash.h> 17 16 #include <crypto/internal/aead.h> 18 17 #include <crypto/internal/hash.h> 19 - #include <crypto/internal/simd.h> 20 18 #include <crypto/internal/skcipher.h> 21 - #include <crypto/gf128mul.h> 22 19 #include <crypto/scatterwalk.h> 23 20 #include <linux/cpufeature.h> 24 - #include <linux/crypto.h> 21 + #include <linux/errno.h> 25 22 #include <linux/jump_label.h> 23 + #include <linux/kernel.h> 26 24 #include <linux/module.h> 25 + #include <linux/string.h> 26 + #include <linux/unaligned.h> 27 27 28 28 MODULE_DESCRIPTION("GHASH hash function using ARMv8 Crypto Extensions"); 29 29 MODULE_AUTHOR("Ard Biesheuvel <ardb@kernel.org>"); ··· 31 31 MODULE_ALIAS_CRYPTO("ghash"); 32 32 MODULE_ALIAS_CRYPTO("gcm(aes)"); 33 33 MODULE_ALIAS_CRYPTO("rfc4106(gcm(aes))"); 34 - 35 - #define GHASH_BLOCK_SIZE 16 36 - #define GHASH_DIGEST_SIZE 16 37 34 38 35 #define RFC4106_NONCE_SIZE 4 39 36 ··· 46 49 u8 nonce[]; // for RFC4106 nonce 47 50 }; 48 51 49 - struct ghash_desc_ctx { 52 + struct arm_ghash_desc_ctx { 50 53 u64 digest[GHASH_DIGEST_SIZE/sizeof(u64)]; 51 - u8 buf[GHASH_BLOCK_SIZE]; 52 - u32 count; 53 54 }; 54 55 55 56 asmlinkage void pmull_ghash_update_p64(int blocks, u64 dg[], const char *src, ··· 60 65 61 66 static int ghash_init(struct shash_desc *desc) 62 67 { 63 - struct ghash_desc_ctx *ctx = shash_desc_ctx(desc); 68 + struct arm_ghash_desc_ctx *ctx = shash_desc_ctx(desc); 64 69 65 - *ctx = (struct ghash_desc_ctx){}; 70 + *ctx = (struct arm_ghash_desc_ctx){}; 66 71 return 0; 67 72 } 68 73 ··· 80 85 static int ghash_update(struct shash_desc *desc, const u8 *src, 81 86 unsigned int len) 82 87 { 83 - struct ghash_desc_ctx *ctx = 
shash_desc_ctx(desc); 84 - unsigned int partial = ctx->count % GHASH_BLOCK_SIZE; 88 + struct ghash_key *key = crypto_shash_ctx(desc->tfm); 89 + struct arm_ghash_desc_ctx *ctx = shash_desc_ctx(desc); 90 + int blocks; 85 91 86 - ctx->count += len; 92 + blocks = len / GHASH_BLOCK_SIZE; 93 + ghash_do_update(blocks, ctx->digest, src, key, NULL); 94 + return len - blocks * GHASH_BLOCK_SIZE; 95 + } 87 96 88 - if ((partial + len) >= GHASH_BLOCK_SIZE) { 89 - struct ghash_key *key = crypto_shash_ctx(desc->tfm); 90 - int blocks; 97 + static int ghash_export(struct shash_desc *desc, void *out) 98 + { 99 + struct arm_ghash_desc_ctx *ctx = shash_desc_ctx(desc); 100 + u8 *dst = out; 91 101 92 - if (partial) { 93 - int p = GHASH_BLOCK_SIZE - partial; 94 - 95 - memcpy(ctx->buf + partial, src, p); 96 - src += p; 97 - len -= p; 98 - } 99 - 100 - blocks = len / GHASH_BLOCK_SIZE; 101 - len %= GHASH_BLOCK_SIZE; 102 - 103 - ghash_do_update(blocks, ctx->digest, src, key, 104 - partial ? ctx->buf : NULL); 105 - src += blocks * GHASH_BLOCK_SIZE; 106 - partial = 0; 107 - } 108 - if (len) 109 - memcpy(ctx->buf + partial, src, len); 102 + put_unaligned_be64(ctx->digest[1], dst); 103 + put_unaligned_be64(ctx->digest[0], dst + 8); 110 104 return 0; 111 105 } 112 106 113 - static int ghash_final(struct shash_desc *desc, u8 *dst) 107 + static int ghash_import(struct shash_desc *desc, const void *in) 114 108 { 115 - struct ghash_desc_ctx *ctx = shash_desc_ctx(desc); 116 - unsigned int partial = ctx->count % GHASH_BLOCK_SIZE; 109 + struct arm_ghash_desc_ctx *ctx = shash_desc_ctx(desc); 110 + const u8 *src = in; 117 111 118 - if (partial) { 119 - struct ghash_key *key = crypto_shash_ctx(desc->tfm); 120 - 121 - memset(ctx->buf + partial, 0, GHASH_BLOCK_SIZE - partial); 122 - ghash_do_update(1, ctx->digest, ctx->buf, key, NULL); 123 - } 124 - put_unaligned_be64(ctx->digest[1], dst); 125 - put_unaligned_be64(ctx->digest[0], dst + 8); 126 - 127 - *ctx = (struct ghash_desc_ctx){}; 112 + ctx->digest[1] = 
get_unaligned_be64(src); 113 + ctx->digest[0] = get_unaligned_be64(src + 8); 128 114 return 0; 115 + } 116 + 117 + static int ghash_finup(struct shash_desc *desc, const u8 *src, 118 + unsigned int len, u8 *dst) 119 + { 120 + struct ghash_key *key = crypto_shash_ctx(desc->tfm); 121 + struct arm_ghash_desc_ctx *ctx = shash_desc_ctx(desc); 122 + 123 + if (len) { 124 + u8 buf[GHASH_BLOCK_SIZE] = {}; 125 + 126 + memcpy(buf, src, len); 127 + ghash_do_update(1, ctx->digest, buf, key, NULL); 128 + memzero_explicit(buf, sizeof(buf)); 129 + } 130 + return ghash_export(desc, dst); 129 131 } 130 132 131 133 static void ghash_reflect(u64 h[], const be128 *k) ··· 167 175 .digestsize = GHASH_DIGEST_SIZE, 168 176 .init = ghash_init, 169 177 .update = ghash_update, 170 - .final = ghash_final, 178 + .finup = ghash_finup, 171 179 .setkey = ghash_setkey, 172 - .descsize = sizeof(struct ghash_desc_ctx), 180 + .export = ghash_export, 181 + .import = ghash_import, 182 + .descsize = sizeof(struct arm_ghash_desc_ctx), 183 + .statesize = sizeof(struct ghash_desc_ctx), 173 184 174 185 .base.cra_name = "ghash", 175 186 .base.cra_driver_name = "ghash-ce", 176 187 .base.cra_priority = 300, 188 + .base.cra_flags = CRYPTO_AHASH_ALG_BLOCK_ONLY, 177 189 .base.cra_blocksize = GHASH_BLOCK_SIZE, 178 190 .base.cra_ctxsize = sizeof(struct ghash_key) + sizeof(u64[2]), 179 191 .base.cra_module = THIS_MODULE, ··· 313 317 u8 *tag, *dst; 314 318 int tail, err; 315 319 316 - if (WARN_ON_ONCE(!may_use_simd())) 317 - return -EBUSY; 318 - 319 320 err = skcipher_walk_aead_encrypt(&walk, req, false); 320 321 321 322 kernel_neon_begin(); ··· 401 408 const u8 *src; 402 409 u8 *tag, *dst; 403 410 int tail, err, ret; 404 - 405 - if (WARN_ON_ONCE(!may_use_simd())) 406 - return -EBUSY; 407 411 408 412 scatterwalk_map_and_copy(otag, req->src, 409 413 req->assoclen + req->cryptlen - authsize,