Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: gcm - use crypto_grab_ahash() and simplify error paths

Make the gcm and gcm_base templates use the new function
crypto_grab_ahash() to initialize their ahash spawn.

This is needed to make all spawns be initialized in a consistent way.

Also simplify the error handling by taking advantage of crypto_drop_*()
now accepting (as a no-op) spawns that haven't been initialized yet.

Signed-off-by: Eric Biggers <ebiggers@google.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

Authored by Eric Biggers; committed by Herbert Xu.
ab6ffd36 37073882

Diff stats: +17 insertions, -37 deletions
crypto/gcm.c
··· 13 13 #include <crypto/scatterwalk.h> 14 14 #include <crypto/gcm.h> 15 15 #include <crypto/hash.h> 16 - #include "internal.h" 17 16 #include <linux/err.h> 18 17 #include <linux/init.h> 19 18 #include <linux/kernel.h> ··· 581 582 struct crypto_attr_type *algt; 582 583 u32 mask; 583 584 struct aead_instance *inst; 584 - struct skcipher_alg *ctr; 585 - struct crypto_alg *ghash_alg; 586 - struct hash_alg_common *ghash; 587 585 struct gcm_instance_ctx *ctx; 586 + struct skcipher_alg *ctr; 587 + struct hash_alg_common *ghash; 588 588 int err; 589 589 590 590 algt = crypto_get_attr_type(tb); ··· 595 597 596 598 mask = crypto_requires_sync(algt->type, algt->mask); 597 599 598 - ghash_alg = crypto_find_alg(ghash_name, &crypto_ahash_type, 599 - CRYPTO_ALG_TYPE_HASH, 600 - CRYPTO_ALG_TYPE_AHASH_MASK | mask); 601 - if (IS_ERR(ghash_alg)) 602 - return PTR_ERR(ghash_alg); 603 - 604 - ghash = __crypto_hash_alg_common(ghash_alg); 605 - 606 - err = -ENOMEM; 607 600 inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL); 608 601 if (!inst) 609 - goto out_put_ghash; 610 - 602 + return -ENOMEM; 611 603 ctx = aead_instance_ctx(inst); 612 - err = crypto_init_ahash_spawn(&ctx->ghash, ghash, 613 - aead_crypto_instance(inst)); 604 + 605 + err = crypto_grab_ahash(&ctx->ghash, aead_crypto_instance(inst), 606 + ghash_name, 0, mask); 614 607 if (err) 615 608 goto err_free_inst; 609 + ghash = crypto_spawn_ahash_alg(&ctx->ghash); 616 610 617 611 err = -EINVAL; 618 612 if (strcmp(ghash->base.cra_name, "ghash") != 0 || 619 613 ghash->digestsize != 16) 620 - goto err_drop_ghash; 614 + goto err_free_inst; 621 615 622 616 err = crypto_grab_skcipher(&ctx->ctr, aead_crypto_instance(inst), 623 617 ctr_name, 0, mask); 624 618 if (err) 625 - goto err_drop_ghash; 626 - 619 + goto err_free_inst; 627 620 ctr = crypto_spawn_skcipher_alg(&ctx->ctr); 628 621 629 622 /* The skcipher algorithm must be CTR mode, using 16-byte blocks. 
*/ ··· 622 633 if (strncmp(ctr->base.cra_name, "ctr(", 4) != 0 || 623 634 crypto_skcipher_alg_ivsize(ctr) != 16 || 624 635 ctr->base.cra_blocksize != 1) 625 - goto out_put_ctr; 636 + goto err_free_inst; 626 637 627 638 err = -ENAMETOOLONG; 628 639 if (snprintf(inst->alg.base.cra_name, CRYPTO_MAX_ALG_NAME, 629 640 "gcm(%s", ctr->base.cra_name + 4) >= CRYPTO_MAX_ALG_NAME) 630 - goto out_put_ctr; 641 + goto err_free_inst; 631 642 632 643 if (snprintf(inst->alg.base.cra_driver_name, CRYPTO_MAX_ALG_NAME, 633 644 "gcm_base(%s,%s)", ctr->base.cra_driver_name, 634 - ghash_alg->cra_driver_name) >= 645 + ghash->base.cra_driver_name) >= 635 646 CRYPTO_MAX_ALG_NAME) 636 - goto out_put_ctr; 647 + goto err_free_inst; 637 648 638 649 inst->alg.base.cra_flags = (ghash->base.cra_flags | 639 650 ctr->base.cra_flags) & CRYPTO_ALG_ASYNC; ··· 656 667 inst->free = crypto_gcm_free; 657 668 658 669 err = aead_register_instance(tmpl, inst); 659 - if (err) 660 - goto out_put_ctr; 661 - 662 - out_put_ghash: 663 - crypto_mod_put(ghash_alg); 664 - return err; 665 - 666 - out_put_ctr: 667 - crypto_drop_skcipher(&ctx->ctr); 668 - err_drop_ghash: 669 - crypto_drop_ahash(&ctx->ghash); 670 + if (err) { 670 671 err_free_inst: 671 - kfree(inst); 672 - goto out_put_ghash; 672 + crypto_gcm_free(inst); 673 + } 674 + return err; 673 675 } 674 676 675 677 static int crypto_gcm_create(struct crypto_template *tmpl, struct rtattr **tb)