Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: arm/ghash - use variably sized key struct

Of the two versions of GHASH that the ARM driver implements, only one
performs aggregation, and so the other one has no use for the powers
of H to be precomputed, or space to be allocated for them in the key
struct. So make the context size dependent on which version is being
selected, and while at it, use a static key to carry this decision,
and get rid of the function pointer.

Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

Authored by Ard Biesheuvel; committed by Herbert Xu.
3d2df845 e4f87485

+24 -27
+24 -27
arch/arm/crypto/ghash-ce-glue.c
··· 16 16 #include <crypto/gf128mul.h> 17 17 #include <linux/cpufeature.h> 18 18 #include <linux/crypto.h> 19 + #include <linux/jump_label.h> 19 20 #include <linux/module.h> 20 21 21 22 MODULE_DESCRIPTION("GHASH hash function using ARMv8 Crypto Extensions"); ··· 28 27 #define GHASH_DIGEST_SIZE 16 29 28 30 29 struct ghash_key { 31 - u64 h[2]; 32 - u64 h2[2]; 33 - u64 h3[2]; 34 - u64 h4[2]; 35 - 36 30 be128 k; 31 + u64 h[][2]; 37 32 }; 38 33 39 34 struct ghash_desc_ctx { ··· 43 46 }; 44 47 45 48 asmlinkage void pmull_ghash_update_p64(int blocks, u64 dg[], const char *src, 46 - struct ghash_key const *k, 47 - const char *head); 49 + u64 const h[][2], const char *head); 48 50 49 51 asmlinkage void pmull_ghash_update_p8(int blocks, u64 dg[], const char *src, 50 - struct ghash_key const *k, 51 - const char *head); 52 + u64 const h[][2], const char *head); 52 53 53 - static void (*pmull_ghash_update)(int blocks, u64 dg[], const char *src, 54 - struct ghash_key const *k, 55 - const char *head); 54 + static __ro_after_init DEFINE_STATIC_KEY_FALSE(use_p64); 56 55 57 56 static int ghash_init(struct shash_desc *desc) 58 57 { ··· 63 70 { 64 71 if (likely(crypto_simd_usable())) { 65 72 kernel_neon_begin(); 66 - pmull_ghash_update(blocks, dg, src, key, head); 73 + if (static_branch_likely(&use_p64)) 74 + pmull_ghash_update_p64(blocks, dg, src, key->h, head); 75 + else 76 + pmull_ghash_update_p8(blocks, dg, src, key->h, head); 67 77 kernel_neon_end(); 68 78 } else { 69 79 be128 dst = { cpu_to_be64(dg[1]), cpu_to_be64(dg[0]) }; ··· 157 161 const u8 *inkey, unsigned int keylen) 158 162 { 159 163 struct ghash_key *key = crypto_shash_ctx(tfm); 160 - be128 h; 161 164 162 165 if (keylen != GHASH_BLOCK_SIZE) 163 166 return -EINVAL; 164 167 165 168 /* needed for the fallback */ 166 169 memcpy(&key->k, inkey, GHASH_BLOCK_SIZE); 167 - ghash_reflect(key->h, &key->k); 170 + ghash_reflect(key->h[0], &key->k); 168 171 169 - h = key->k; 170 - gf128mul_lle(&h, &key->k); 171 - 
ghash_reflect(key->h2, &h); 172 + if (static_branch_likely(&use_p64)) { 173 + be128 h = key->k; 172 174 173 - gf128mul_lle(&h, &key->k); 174 - ghash_reflect(key->h3, &h); 175 + gf128mul_lle(&h, &key->k); 176 + ghash_reflect(key->h[1], &h); 175 177 176 - gf128mul_lle(&h, &key->k); 177 - ghash_reflect(key->h4, &h); 178 + gf128mul_lle(&h, &key->k); 179 + ghash_reflect(key->h[2], &h); 178 180 181 + gf128mul_lle(&h, &key->k); 182 + ghash_reflect(key->h[3], &h); 183 + } 179 184 return 0; 180 185 } 181 186 ··· 192 195 .base.cra_driver_name = "ghash-ce-sync", 193 196 .base.cra_priority = 300 - 1, 194 197 .base.cra_blocksize = GHASH_BLOCK_SIZE, 195 - .base.cra_ctxsize = sizeof(struct ghash_key), 198 + .base.cra_ctxsize = sizeof(struct ghash_key) + sizeof(u64[2]), 196 199 .base.cra_module = THIS_MODULE, 197 200 }; 198 201 ··· 351 354 if (!(elf_hwcap & HWCAP_NEON)) 352 355 return -ENODEV; 353 356 354 - if (elf_hwcap2 & HWCAP2_PMULL) 355 - pmull_ghash_update = pmull_ghash_update_p64; 356 - else 357 - pmull_ghash_update = pmull_ghash_update_p8; 357 + if (elf_hwcap2 & HWCAP2_PMULL) { 358 + ghash_alg.base.cra_ctxsize += 3 * sizeof(u64[2]); 359 + static_branch_enable(&use_p64); 360 + } 358 361 359 362 err = crypto_register_shash(&ghash_alg); 360 363 if (err)