Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: arm/ghash - provide a synchronous version

GHASH is used by the GCM mode, which is often used in contexts where
only synchronous ciphers are permitted. So provide a synchronous version
of GHASH based on the existing code. This requires a non-SIMD fallback
to deal with invocations occurring from a context where SIMD instructions
may not be used.

Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

Authored by Ard Biesheuvel; committed by Herbert Xu.
0a5dff98 e5f05040

+52 -26
arch/arm/crypto/ghash-ce-glue.c
··· 9 9 #include <asm/neon.h> 10 10 #include <asm/simd.h> 11 11 #include <asm/unaligned.h> 12 + #include <crypto/b128ops.h> 12 13 #include <crypto/cryptd.h> 13 14 #include <crypto/internal/hash.h> 14 15 #include <crypto/internal/simd.h> ··· 31 30 u64 h2[2]; 32 31 u64 h3[2]; 33 32 u64 h4[2]; 33 + 34 + be128 k; 34 35 }; 35 36 36 37 struct ghash_desc_ctx { ··· 65 62 return 0; 66 63 } 67 64 65 + static void ghash_do_update(int blocks, u64 dg[], const char *src, 66 + struct ghash_key *key, const char *head) 67 + { 68 + if (likely(crypto_simd_usable())) { 69 + kernel_neon_begin(); 70 + pmull_ghash_update(blocks, dg, src, key, head); 71 + kernel_neon_end(); 72 + } else { 73 + be128 dst = { cpu_to_be64(dg[1]), cpu_to_be64(dg[0]) }; 74 + 75 + do { 76 + const u8 *in = src; 77 + 78 + if (head) { 79 + in = head; 80 + blocks++; 81 + head = NULL; 82 + } else { 83 + src += GHASH_BLOCK_SIZE; 84 + } 85 + 86 + crypto_xor((u8 *)&dst, in, GHASH_BLOCK_SIZE); 87 + gf128mul_lle(&dst, &key->k); 88 + } while (--blocks); 89 + 90 + dg[0] = be64_to_cpu(dst.b); 91 + dg[1] = be64_to_cpu(dst.a); 92 + } 93 + } 94 + 68 95 static int ghash_update(struct shash_desc *desc, const u8 *src, 69 96 unsigned int len) 70 97 { ··· 118 85 blocks = len / GHASH_BLOCK_SIZE; 119 86 len %= GHASH_BLOCK_SIZE; 120 87 121 - kernel_neon_begin(); 122 - pmull_ghash_update(blocks, ctx->digest, src, key, 123 - partial ? ctx->buf : NULL); 124 - kernel_neon_end(); 88 + ghash_do_update(blocks, ctx->digest, src, key, 89 + partial ? 
ctx->buf : NULL); 125 90 src += blocks * GHASH_BLOCK_SIZE; 126 91 partial = 0; 127 92 } ··· 137 106 struct ghash_key *key = crypto_shash_ctx(desc->tfm); 138 107 139 108 memset(ctx->buf + partial, 0, GHASH_BLOCK_SIZE - partial); 140 - kernel_neon_begin(); 141 - pmull_ghash_update(1, ctx->digest, ctx->buf, key, NULL); 142 - kernel_neon_end(); 109 + ghash_do_update(1, ctx->digest, ctx->buf, key, NULL); 143 110 } 144 111 put_unaligned_be64(ctx->digest[1], dst); 145 112 put_unaligned_be64(ctx->digest[0], dst + 8); ··· 161 132 const u8 *inkey, unsigned int keylen) 162 133 { 163 134 struct ghash_key *key = crypto_shash_ctx(tfm); 164 - be128 h, k; 135 + be128 h; 165 136 166 137 if (keylen != GHASH_BLOCK_SIZE) { 167 138 crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN); 168 139 return -EINVAL; 169 140 } 170 141 171 - memcpy(&k, inkey, GHASH_BLOCK_SIZE); 172 - ghash_reflect(key->h, &k); 142 + /* needed for the fallback */ 143 + memcpy(&key->k, inkey, GHASH_BLOCK_SIZE); 144 + ghash_reflect(key->h, &key->k); 173 145 174 - h = k; 175 - gf128mul_lle(&h, &k); 146 + h = key->k; 147 + gf128mul_lle(&h, &key->k); 176 148 ghash_reflect(key->h2, &h); 177 149 178 - gf128mul_lle(&h, &k); 150 + gf128mul_lle(&h, &key->k); 179 151 ghash_reflect(key->h3, &h); 180 152 181 - gf128mul_lle(&h, &k); 153 + gf128mul_lle(&h, &key->k); 182 154 ghash_reflect(key->h4, &h); 183 155 184 156 return 0; ··· 192 162 .final = ghash_final, 193 163 .setkey = ghash_setkey, 194 164 .descsize = sizeof(struct ghash_desc_ctx), 195 - .base = { 196 - .cra_name = "__ghash", 197 - .cra_driver_name = "__driver-ghash-ce", 198 - .cra_priority = 0, 199 - .cra_flags = CRYPTO_ALG_INTERNAL, 200 - .cra_blocksize = GHASH_BLOCK_SIZE, 201 - .cra_ctxsize = sizeof(struct ghash_key), 202 - .cra_module = THIS_MODULE, 203 - }, 165 + 166 + .base.cra_name = "ghash", 167 + .base.cra_driver_name = "ghash-ce-sync", 168 + .base.cra_priority = 300 - 1, 169 + .base.cra_blocksize = GHASH_BLOCK_SIZE, 170 + .base.cra_ctxsize = sizeof(struct 
ghash_key), 171 + .base.cra_module = THIS_MODULE, 204 172 }; 205 173 206 174 static int ghash_async_init(struct ahash_request *req) ··· 313 285 struct cryptd_ahash *cryptd_tfm; 314 286 struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm); 315 287 316 - cryptd_tfm = cryptd_alloc_ahash("__driver-ghash-ce", 317 - CRYPTO_ALG_INTERNAL, 318 - CRYPTO_ALG_INTERNAL); 288 + cryptd_tfm = cryptd_alloc_ahash("ghash-ce-sync", 0, 0); 319 289 if (IS_ERR(cryptd_tfm)) 320 290 return PTR_ERR(cryptd_tfm); 321 291 ctx->cryptd_tfm = cryptd_tfm;