Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: omap-aes-gcm - use the AES library to encrypt the tag

The OMAP AES-GCM implementation uses a fallback ecb(aes) skcipher to
produce the keystream to encrypt the output tag. Let's use the new
AES library instead - this is much simpler, and shouldn't affect
performance given that it only involves a single block.

Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
Reviewed-by: Tero Kristo <t-kristo@ti.com>
Tested-by: Tero Kristo <t-kristo@ti.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

Authored by Ard Biesheuvel; committed by Herbert Xu.
f0956d42 b877ad1a

+33 -98
+25 -73
drivers/crypto/omap-aes-gcm.c
··· 167 167 return 0; 168 168 } 169 169 170 - static void omap_aes_gcm_complete(struct crypto_async_request *req, int err) 171 - { 172 - struct omap_aes_gcm_result *res = req->data; 173 - 174 - if (err == -EINPROGRESS) 175 - return; 176 - 177 - res->err = err; 178 - complete(&res->completion); 179 - } 180 - 181 170 static int do_encrypt_iv(struct aead_request *req, u32 *tag, u32 *iv) 182 171 { 183 - struct scatterlist iv_sg, tag_sg; 184 - struct skcipher_request *sk_req; 185 - struct omap_aes_gcm_result result; 186 - struct omap_aes_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req)); 187 - int ret = 0; 172 + struct omap_aes_gcm_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req)); 188 173 189 - sk_req = skcipher_request_alloc(ctx->ctr, GFP_KERNEL); 190 - if (!sk_req) { 191 - pr_err("skcipher: Failed to allocate request\n"); 192 - return -ENOMEM; 193 - } 194 - 195 - init_completion(&result.completion); 196 - 197 - sg_init_one(&iv_sg, iv, AES_BLOCK_SIZE); 198 - sg_init_one(&tag_sg, tag, AES_BLOCK_SIZE); 199 - skcipher_request_set_callback(sk_req, CRYPTO_TFM_REQ_MAY_BACKLOG, 200 - omap_aes_gcm_complete, &result); 201 - ret = crypto_skcipher_setkey(ctx->ctr, (u8 *)ctx->key, ctx->keylen); 202 - skcipher_request_set_crypt(sk_req, &iv_sg, &tag_sg, AES_BLOCK_SIZE, 203 - NULL); 204 - ret = crypto_skcipher_encrypt(sk_req); 205 - switch (ret) { 206 - case 0: 207 - break; 208 - case -EINPROGRESS: 209 - case -EBUSY: 210 - ret = wait_for_completion_interruptible(&result.completion); 211 - if (!ret) { 212 - ret = result.err; 213 - if (!ret) { 214 - reinit_completion(&result.completion); 215 - break; 216 - } 217 - } 218 - /* fall through */ 219 - default: 220 - pr_err("Encryption of IV failed for GCM mode\n"); 221 - break; 222 - } 223 - 224 - skcipher_request_free(sk_req); 225 - return ret; 174 + aes_encrypt(&ctx->actx, (u8 *)tag, (u8 *)iv); 175 + return 0; 226 176 } 227 177 228 178 void omap_aes_gcm_dma_out_callback(void *data) ··· 202 252 static int 
omap_aes_gcm_handle_queue(struct omap_aes_dev *dd, 203 253 struct aead_request *req) 204 254 { 205 - struct omap_aes_ctx *ctx; 255 + struct omap_aes_gcm_ctx *ctx; 206 256 struct aead_request *backlog; 207 257 struct omap_aes_reqctx *rctx; 208 258 unsigned long flags; ··· 231 281 ctx = crypto_aead_ctx(crypto_aead_reqtfm(req)); 232 282 rctx = aead_request_ctx(req); 233 283 234 - dd->ctx = ctx; 284 + dd->ctx = &ctx->octx; 235 285 rctx->dd = dd; 236 286 dd->aead_req = req; 237 287 ··· 310 360 311 361 int omap_aes_4106gcm_encrypt(struct aead_request *req) 312 362 { 313 - struct omap_aes_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req)); 363 + struct omap_aes_gcm_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req)); 314 364 struct omap_aes_reqctx *rctx = aead_request_ctx(req); 315 365 316 - memcpy(rctx->iv, ctx->nonce, 4); 366 + memcpy(rctx->iv, ctx->octx.nonce, 4); 317 367 memcpy(rctx->iv + 4, req->iv, 8); 318 368 return crypto_ipsec_check_assoclen(req->assoclen) ?: 319 369 omap_aes_gcm_crypt(req, FLAGS_ENCRYPT | FLAGS_GCM | ··· 322 372 323 373 int omap_aes_4106gcm_decrypt(struct aead_request *req) 324 374 { 325 - struct omap_aes_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req)); 375 + struct omap_aes_gcm_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req)); 326 376 struct omap_aes_reqctx *rctx = aead_request_ctx(req); 327 377 328 - memcpy(rctx->iv, ctx->nonce, 4); 378 + memcpy(rctx->iv, ctx->octx.nonce, 4); 329 379 memcpy(rctx->iv + 4, req->iv, 8); 330 380 return crypto_ipsec_check_assoclen(req->assoclen) ?: 331 381 omap_aes_gcm_crypt(req, FLAGS_GCM | FLAGS_RFC4106_GCM); ··· 334 384 int omap_aes_gcm_setkey(struct crypto_aead *tfm, const u8 *key, 335 385 unsigned int keylen) 336 386 { 337 - struct omap_aes_ctx *ctx = crypto_aead_ctx(tfm); 387 + struct omap_aes_gcm_ctx *ctx = crypto_aead_ctx(tfm); 388 + int ret; 338 389 339 - if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 && 340 - keylen != AES_KEYSIZE_256) 341 - return -EINVAL; 390 + ret = 
aes_expandkey(&ctx->actx, key, keylen); 391 + if (ret) 392 + return ret; 342 393 343 - memcpy(ctx->key, key, keylen); 344 - ctx->keylen = keylen; 394 + memcpy(ctx->octx.key, key, keylen); 395 + ctx->octx.keylen = keylen; 345 396 346 397 return 0; 347 398 } ··· 350 399 int omap_aes_4106gcm_setkey(struct crypto_aead *tfm, const u8 *key, 351 400 unsigned int keylen) 352 401 { 353 - struct omap_aes_ctx *ctx = crypto_aead_ctx(tfm); 402 + struct omap_aes_gcm_ctx *ctx = crypto_aead_ctx(tfm); 403 + int ret; 354 404 355 405 if (keylen < 4) 356 406 return -EINVAL; 357 - 358 407 keylen -= 4; 359 - if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 && 360 - keylen != AES_KEYSIZE_256) 361 - return -EINVAL; 362 408 363 - memcpy(ctx->key, key, keylen); 364 - memcpy(ctx->nonce, key + keylen, 4); 365 - ctx->keylen = keylen; 409 + ret = aes_expandkey(&ctx->actx, key, keylen); 410 + if (ret) 411 + return ret; 412 + 413 + memcpy(ctx->octx.key, key, keylen); 414 + memcpy(ctx->octx.nonce, key + keylen, 4); 415 + ctx->octx.keylen = keylen; 366 416 367 417 return 0; 368 418 }
+2 -24
drivers/crypto/omap-aes.c
··· 645 645 static int omap_aes_gcm_cra_init(struct crypto_aead *tfm) 646 646 { 647 647 struct omap_aes_dev *dd = NULL; 648 - struct omap_aes_ctx *ctx = crypto_aead_ctx(tfm); 649 648 int err; 650 649 651 650 /* Find AES device, currently picks the first device */ ··· 662 663 } 663 664 664 665 tfm->reqsize = sizeof(struct omap_aes_reqctx); 665 - ctx->ctr = crypto_alloc_skcipher("ecb(aes)", 0, 0); 666 - if (IS_ERR(ctx->ctr)) { 667 - pr_warn("could not load aes driver for encrypting IV\n"); 668 - return PTR_ERR(ctx->ctr); 669 - } 670 - 671 666 return 0; 672 667 } 673 668 ··· 673 680 crypto_free_sync_skcipher(ctx->fallback); 674 681 675 682 ctx->fallback = NULL; 676 - } 677 - 678 - static void omap_aes_gcm_cra_exit(struct crypto_aead *tfm) 679 - { 680 - struct omap_aes_ctx *ctx = crypto_aead_ctx(tfm); 681 - 682 - if (ctx->fallback) 683 - crypto_free_sync_skcipher(ctx->fallback); 684 - 685 - ctx->fallback = NULL; 686 - 687 - if (ctx->ctr) 688 - crypto_free_skcipher(ctx->ctr); 689 683 } 690 684 691 685 /* ********************** ALGS ************************************ */ ··· 758 778 .cra_flags = CRYPTO_ALG_ASYNC | 759 779 CRYPTO_ALG_KERN_DRIVER_ONLY, 760 780 .cra_blocksize = 1, 761 - .cra_ctxsize = sizeof(struct omap_aes_ctx), 781 + .cra_ctxsize = sizeof(struct omap_aes_gcm_ctx), 762 782 .cra_alignmask = 0xf, 763 783 .cra_module = THIS_MODULE, 764 784 }, 765 785 .init = omap_aes_gcm_cra_init, 766 - .exit = omap_aes_gcm_cra_exit, 767 786 .ivsize = GCM_AES_IV_SIZE, 768 787 .maxauthsize = AES_BLOCK_SIZE, 769 788 .setkey = omap_aes_gcm_setkey, ··· 778 799 .cra_flags = CRYPTO_ALG_ASYNC | 779 800 CRYPTO_ALG_KERN_DRIVER_ONLY, 780 801 .cra_blocksize = 1, 781 - .cra_ctxsize = sizeof(struct omap_aes_ctx), 802 + .cra_ctxsize = sizeof(struct omap_aes_gcm_ctx), 782 803 .cra_alignmask = 0xf, 783 804 .cra_module = THIS_MODULE, 784 805 }, 785 806 .init = omap_aes_gcm_cra_init, 786 - .exit = omap_aes_gcm_cra_exit, 787 807 .maxauthsize = AES_BLOCK_SIZE, 788 808 .ivsize = 
GCM_RFC4106_IV_SIZE, 789 809 .setkey = omap_aes_4106gcm_setkey,
+6 -1
drivers/crypto/omap-aes.h
··· 9 9 #ifndef __OMAP_AES_H__ 10 10 #define __OMAP_AES_H__ 11 11 12 + #include <crypto/aes.h> 12 13 #include <crypto/engine.h> 13 14 14 15 #define DST_MAXBURST 4 ··· 99 98 u32 key[AES_KEYSIZE_256 / sizeof(u32)]; 100 99 u8 nonce[4]; 101 100 struct crypto_sync_skcipher *fallback; 102 - struct crypto_skcipher *ctr; 101 + }; 102 + 103 + struct omap_aes_gcm_ctx { 104 + struct omap_aes_ctx octx; 105 + struct crypto_aes_ctx actx; 103 106 }; 104 107 105 108 struct omap_aes_reqctx {