Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: arm/aes-neonbs - provide a synchronous version of ctr(aes)

AES in CTR mode is used by modes such as GCM and CCM, which are often
used in contexts where only synchronous ciphers are permitted. So
provide a synchronous version of ctr(aes) based on the existing code.
This requires a non-SIMD fallback to deal with invocations occurring
from a context where SIMD instructions may not be used. We have a
helper for this now in the AES library, so wire that up.

Signed-off-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

Authored by Ard Biesheuvel and committed by Herbert Xu.
e5f05040 5eedf315

+65 lines added
arch/arm/crypto/aes-neonbs-glue.c
··· 6 6 */ 7 7 8 8 #include <asm/neon.h> 9 + #include <asm/simd.h> 9 10 #include <crypto/aes.h> 10 11 #include <crypto/cbc.h> 12 + #include <crypto/ctr.h> 11 13 #include <crypto/internal/simd.h> 12 14 #include <crypto/internal/skcipher.h> 13 15 #include <crypto/xts.h> ··· 54 52 struct aesbs_xts_ctx { 55 53 struct aesbs_ctx key; 56 54 struct crypto_cipher *tweak_tfm; 55 + }; 56 + 57 + struct aesbs_ctr_ctx { 58 + struct aesbs_ctx key; /* must be first member */ 59 + struct crypto_aes_ctx fallback; 57 60 }; 58 61 59 62 static int aesbs_setkey(struct crypto_skcipher *tfm, const u8 *in_key, ··· 196 189 crypto_free_cipher(ctx->enc_tfm); 197 190 } 198 191 192 + static int aesbs_ctr_setkey_sync(struct crypto_skcipher *tfm, const u8 *in_key, 193 + unsigned int key_len) 194 + { 195 + struct aesbs_ctr_ctx *ctx = crypto_skcipher_ctx(tfm); 196 + int err; 197 + 198 + err = aes_expandkey(&ctx->fallback, in_key, key_len); 199 + if (err) 200 + return err; 201 + 202 + ctx->key.rounds = 6 + key_len / 4; 203 + 204 + kernel_neon_begin(); 205 + aesbs_convert_key(ctx->key.rk, ctx->fallback.key_enc, ctx->key.rounds); 206 + kernel_neon_end(); 207 + 208 + return 0; 209 + } 210 + 199 211 static int ctr_encrypt(struct skcipher_request *req) 200 212 { 201 213 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); ··· 255 229 kernel_neon_end(); 256 230 257 231 return err; 232 + } 233 + 234 + static void ctr_encrypt_one(struct crypto_skcipher *tfm, const u8 *src, u8 *dst) 235 + { 236 + struct aesbs_ctr_ctx *ctx = crypto_skcipher_ctx(tfm); 237 + unsigned long flags; 238 + 239 + /* 240 + * Temporarily disable interrupts to avoid races where 241 + * cachelines are evicted when the CPU is interrupted 242 + * to do something else. 
243 + */ 244 + local_irq_save(flags); 245 + aes_encrypt(&ctx->fallback, dst, src); 246 + local_irq_restore(flags); 247 + } 248 + 249 + static int ctr_encrypt_sync(struct skcipher_request *req) 250 + { 251 + if (!crypto_simd_usable()) 252 + return crypto_ctr_encrypt_walk(req, ctr_encrypt_one); 253 + 254 + return ctr_encrypt(req); 258 255 } 259 256 260 257 static int aesbs_xts_setkey(struct crypto_skcipher *tfm, const u8 *in_key, ··· 407 358 .setkey = aesbs_setkey, 408 359 .encrypt = ctr_encrypt, 409 360 .decrypt = ctr_encrypt, 361 + }, { 362 + .base.cra_name = "ctr(aes)", 363 + .base.cra_driver_name = "ctr-aes-neonbs-sync", 364 + .base.cra_priority = 250 - 1, 365 + .base.cra_blocksize = 1, 366 + .base.cra_ctxsize = sizeof(struct aesbs_ctr_ctx), 367 + .base.cra_module = THIS_MODULE, 368 + 369 + .min_keysize = AES_MIN_KEY_SIZE, 370 + .max_keysize = AES_MAX_KEY_SIZE, 371 + .chunksize = AES_BLOCK_SIZE, 372 + .walksize = 8 * AES_BLOCK_SIZE, 373 + .ivsize = AES_BLOCK_SIZE, 374 + .setkey = aesbs_ctr_setkey_sync, 375 + .encrypt = ctr_encrypt_sync, 376 + .decrypt = ctr_encrypt_sync, 410 377 }, { 411 378 .base.cra_name = "__xts(aes)", 412 379 .base.cra_driver_name = "__xts-aes-neonbs",