Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: algapi - use common mechanism for inheriting flags

The flag CRYPTO_ALG_ASYNC is "inherited" in the sense that when a
template is instantiated, the template will have CRYPTO_ALG_ASYNC set if
any of the algorithms it uses has CRYPTO_ALG_ASYNC set.

We'd like to add a second flag (CRYPTO_ALG_ALLOCATES_MEMORY) that gets
"inherited" in the same way. This is difficult because the handling of
CRYPTO_ALG_ASYNC is hardcoded everywhere. Address this by:

- Add CRYPTO_ALG_INHERITED_FLAGS, which contains the set of flags that
have these inheritance semantics.

- Add crypto_algt_inherited_mask(), for use by template ->create()
methods. It returns any of these flags that the user asked to be
unset and thus must be passed in the 'mask' to crypto_grab_*().

- Also modify crypto_check_attr_type() to handle computing the 'mask'
so that most templates can just use this.

- Make crypto_grab_*() propagate these flags to the template instance
being created so that templates don't have to do this themselves.

Make crypto/simd.c propagate these flags too, since it "wraps" another
algorithm, similar to a template.

Based on a patch by Mikulas Patocka <mpatocka@redhat.com>
(https://lore.kernel.org/r/alpine.LRH.2.02.2006301414580.30526@file01.intranet.prod.int.rdu2.redhat.com).

Signed-off-by: Eric Biggers <ebiggers@google.com>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

Authored by Eric Biggers and committed by Herbert Xu.
7bcb2c99 4688111e

+153 -234
+3 -11
crypto/adiantum.c
··· 490 490 491 491 static int adiantum_create(struct crypto_template *tmpl, struct rtattr **tb) 492 492 { 493 - struct crypto_attr_type *algt; 494 493 u32 mask; 495 494 const char *nhpoly1305_name; 496 495 struct skcipher_instance *inst; ··· 499 500 struct shash_alg *hash_alg; 500 501 int err; 501 502 502 - algt = crypto_get_attr_type(tb); 503 - if (IS_ERR(algt)) 504 - return PTR_ERR(algt); 505 - 506 - if ((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask) 507 - return -EINVAL; 508 - 509 - mask = crypto_requires_sync(algt->type, algt->mask); 503 + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask); 504 + if (err) 505 + return err; 510 506 511 507 inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL); 512 508 if (!inst) ··· 559 565 hash_alg->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME) 560 566 goto err_free_inst; 561 567 562 - inst->alg.base.cra_flags = streamcipher_alg->base.cra_flags & 563 - CRYPTO_ALG_ASYNC; 564 568 inst->alg.base.cra_blocksize = BLOCKCIPHER_BLOCK_SIZE; 565 569 inst->alg.base.cra_ctxsize = sizeof(struct adiantum_tfm_ctx); 566 570 inst->alg.base.cra_alignmask = streamcipher_alg->base.cra_alignmask |
+20 -1
crypto/algapi.c
··· 690 690 spawn->mask = mask; 691 691 spawn->next = inst->spawns; 692 692 inst->spawns = spawn; 693 + inst->alg.cra_flags |= 694 + (alg->cra_flags & CRYPTO_ALG_INHERITED_FLAGS); 693 695 err = 0; 694 696 } 695 697 up_write(&crypto_alg_sem); ··· 818 816 } 819 817 EXPORT_SYMBOL_GPL(crypto_get_attr_type); 820 818 821 - int crypto_check_attr_type(struct rtattr **tb, u32 type) 819 + /** 820 + * crypto_check_attr_type() - check algorithm type and compute inherited mask 821 + * @tb: the template parameters 822 + * @type: the algorithm type the template would be instantiated as 823 + * @mask_ret: (output) the mask that should be passed to crypto_grab_*() 824 + * to restrict the flags of any inner algorithms 825 + * 826 + * Validate that the algorithm type the user requested is compatible with the 827 + * one the template would actually be instantiated as. E.g., if the user is 828 + * doing crypto_alloc_shash("cbc(aes)", ...), this would return an error because 829 + * the "cbc" template creates an "skcipher" algorithm, not an "shash" algorithm. 830 + * 831 + * Also compute the mask to use to restrict the flags of any inner algorithms. 832 + * 833 + * Return: 0 on success; -errno on failure 834 + */ 835 + int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret) 822 836 { 823 837 struct crypto_attr_type *algt; 824 838 ··· 845 827 if ((algt->type ^ type) & algt->mask) 846 828 return -EINVAL; 847 829 830 + *mask_ret = crypto_algt_inherited_mask(algt); 848 831 return 0; 849 832 } 850 833 EXPORT_SYMBOL_GPL(crypto_check_attr_type);
+3 -11
crypto/authenc.c
··· 372 372 static int crypto_authenc_create(struct crypto_template *tmpl, 373 373 struct rtattr **tb) 374 374 { 375 - struct crypto_attr_type *algt; 376 375 u32 mask; 377 376 struct aead_instance *inst; 378 377 struct authenc_instance_ctx *ctx; ··· 380 381 struct skcipher_alg *enc; 381 382 int err; 382 383 383 - algt = crypto_get_attr_type(tb); 384 - if (IS_ERR(algt)) 385 - return PTR_ERR(algt); 386 - 387 - if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask) 388 - return -EINVAL; 389 - 390 - mask = crypto_requires_sync(algt->type, algt->mask); 384 + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_AEAD, &mask); 385 + if (err) 386 + return err; 391 387 392 388 inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL); 393 389 if (!inst) ··· 417 423 enc->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME) 418 424 goto err_free_inst; 419 425 420 - inst->alg.base.cra_flags = (auth_base->cra_flags | 421 - enc->base.cra_flags) & CRYPTO_ALG_ASYNC; 422 426 inst->alg.base.cra_priority = enc->base.cra_priority * 10 + 423 427 auth_base->cra_priority; 424 428 inst->alg.base.cra_blocksize = enc->base.cra_blocksize;
+3 -11
crypto/authencesn.c
··· 390 390 static int crypto_authenc_esn_create(struct crypto_template *tmpl, 391 391 struct rtattr **tb) 392 392 { 393 - struct crypto_attr_type *algt; 394 393 u32 mask; 395 394 struct aead_instance *inst; 396 395 struct authenc_esn_instance_ctx *ctx; ··· 398 399 struct skcipher_alg *enc; 399 400 int err; 400 401 401 - algt = crypto_get_attr_type(tb); 402 - if (IS_ERR(algt)) 403 - return PTR_ERR(algt); 404 - 405 - if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask) 406 - return -EINVAL; 407 - 408 - mask = crypto_requires_sync(algt->type, algt->mask); 402 + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_AEAD, &mask); 403 + if (err) 404 + return err; 409 405 410 406 inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL); 411 407 if (!inst) ··· 431 437 enc->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME) 432 438 goto err_free_inst; 433 439 434 - inst->alg.base.cra_flags = (auth_base->cra_flags | 435 - enc->base.cra_flags) & CRYPTO_ALG_ASYNC; 436 440 inst->alg.base.cra_priority = enc->base.cra_priority * 10 + 437 441 auth_base->cra_priority; 438 442 inst->alg.base.cra_blocksize = enc->base.cra_blocksize;
+10 -23
crypto/ccm.c
··· 447 447 const char *ctr_name, 448 448 const char *mac_name) 449 449 { 450 - struct crypto_attr_type *algt; 451 450 u32 mask; 452 451 struct aead_instance *inst; 453 452 struct ccm_instance_ctx *ictx; ··· 454 455 struct hash_alg_common *mac; 455 456 int err; 456 457 457 - algt = crypto_get_attr_type(tb); 458 - if (IS_ERR(algt)) 459 - return PTR_ERR(algt); 460 - 461 - if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask) 462 - return -EINVAL; 463 - 464 - mask = crypto_requires_sync(algt->type, algt->mask); 458 + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_AEAD, &mask); 459 + if (err) 460 + return err; 465 461 466 462 inst = kzalloc(sizeof(*inst) + sizeof(*ictx), GFP_KERNEL); 467 463 if (!inst) ··· 464 470 ictx = aead_instance_ctx(inst); 465 471 466 472 err = crypto_grab_ahash(&ictx->mac, aead_crypto_instance(inst), 467 - mac_name, 0, CRYPTO_ALG_ASYNC); 473 + mac_name, 0, mask | CRYPTO_ALG_ASYNC); 468 474 if (err) 469 475 goto err_free_inst; 470 476 mac = crypto_spawn_ahash_alg(&ictx->mac); ··· 501 507 mac->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME) 502 508 goto err_free_inst; 503 509 504 - inst->alg.base.cra_flags = ctr->base.cra_flags & CRYPTO_ALG_ASYNC; 505 510 inst->alg.base.cra_priority = (mac->base.cra_priority + 506 511 ctr->base.cra_priority) / 2; 507 512 inst->alg.base.cra_blocksize = 1; ··· 705 712 static int crypto_rfc4309_create(struct crypto_template *tmpl, 706 713 struct rtattr **tb) 707 714 { 708 - struct crypto_attr_type *algt; 709 715 u32 mask; 710 716 struct aead_instance *inst; 711 717 struct crypto_aead_spawn *spawn; 712 718 struct aead_alg *alg; 713 719 int err; 714 720 715 - algt = crypto_get_attr_type(tb); 716 - if (IS_ERR(algt)) 717 - return PTR_ERR(algt); 718 - 719 - if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask) 720 - return -EINVAL; 721 - 722 - mask = crypto_requires_sync(algt->type, algt->mask); 721 + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_AEAD, &mask); 722 + if (err) 723 + return err; 723 724 724 725 inst = 
kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL); 725 726 if (!inst) ··· 746 759 CRYPTO_MAX_ALG_NAME) 747 760 goto err_free_inst; 748 761 749 - inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC; 750 762 inst->alg.base.cra_priority = alg->base.cra_priority; 751 763 inst->alg.base.cra_blocksize = 1; 752 764 inst->alg.base.cra_alignmask = alg->base.cra_alignmask; ··· 864 878 struct shash_instance *inst; 865 879 struct crypto_cipher_spawn *spawn; 866 880 struct crypto_alg *alg; 881 + u32 mask; 867 882 int err; 868 883 869 - err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH); 884 + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH, &mask); 870 885 if (err) 871 886 return err; 872 887 ··· 877 890 spawn = shash_instance_ctx(inst); 878 891 879 892 err = crypto_grab_cipher(spawn, shash_crypto_instance(inst), 880 - crypto_attr_alg_name(tb[1]), 0, 0); 893 + crypto_attr_alg_name(tb[1]), 0, mask); 881 894 if (err) 882 895 goto err_free_inst; 883 896 alg = crypto_spawn_cipher_alg(spawn);
+3 -11
crypto/chacha20poly1305.c
··· 555 555 static int chachapoly_create(struct crypto_template *tmpl, struct rtattr **tb, 556 556 const char *name, unsigned int ivsize) 557 557 { 558 - struct crypto_attr_type *algt; 559 558 u32 mask; 560 559 struct aead_instance *inst; 561 560 struct chachapoly_instance_ctx *ctx; ··· 565 566 if (ivsize > CHACHAPOLY_IV_SIZE) 566 567 return -EINVAL; 567 568 568 - algt = crypto_get_attr_type(tb); 569 - if (IS_ERR(algt)) 570 - return PTR_ERR(algt); 571 - 572 - if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask) 573 - return -EINVAL; 574 - 575 - mask = crypto_requires_sync(algt->type, algt->mask); 569 + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_AEAD, &mask); 570 + if (err) 571 + return err; 576 572 577 573 inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL); 578 574 if (!inst) ··· 607 613 poly->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME) 608 614 goto err_free_inst; 609 615 610 - inst->alg.base.cra_flags = (chacha->base.cra_flags | 611 - poly->base.cra_flags) & CRYPTO_ALG_ASYNC; 612 616 inst->alg.base.cra_priority = (chacha->base.cra_priority + 613 617 poly->base.cra_priority) / 2; 614 618 inst->alg.base.cra_blocksize = 1;
+3 -2
crypto/cmac.c
··· 225 225 struct crypto_cipher_spawn *spawn; 226 226 struct crypto_alg *alg; 227 227 unsigned long alignmask; 228 + u32 mask; 228 229 int err; 229 230 230 - err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH); 231 + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH, &mask); 231 232 if (err) 232 233 return err; 233 234 ··· 238 237 spawn = shash_instance_ctx(inst); 239 238 240 239 err = crypto_grab_cipher(spawn, shash_crypto_instance(inst), 241 - crypto_attr_alg_name(tb[1]), 0, 0); 240 + crypto_attr_alg_name(tb[1]), 0, mask); 242 241 if (err) 243 242 goto err_free_inst; 244 243 alg = crypto_spawn_cipher_alg(spawn);
+30 -29
crypto/cryptd.c
··· 191 191 return ictx->queue; 192 192 } 193 193 194 - static inline void cryptd_check_internal(struct rtattr **tb, u32 *type, 195 - u32 *mask) 194 + static void cryptd_type_and_mask(struct crypto_attr_type *algt, 195 + u32 *type, u32 *mask) 196 196 { 197 - struct crypto_attr_type *algt; 197 + /* 198 + * cryptd is allowed to wrap internal algorithms, but in that case the 199 + * resulting cryptd instance will be marked as internal as well. 200 + */ 201 + *type = algt->type & CRYPTO_ALG_INTERNAL; 202 + *mask = algt->mask & CRYPTO_ALG_INTERNAL; 198 203 199 - algt = crypto_get_attr_type(tb); 200 - if (IS_ERR(algt)) 201 - return; 204 + /* No point in cryptd wrapping an algorithm that's already async. */ 205 + *mask |= CRYPTO_ALG_ASYNC; 202 206 203 - *type |= algt->type & CRYPTO_ALG_INTERNAL; 204 - *mask |= algt->mask & CRYPTO_ALG_INTERNAL; 207 + *mask |= crypto_algt_inherited_mask(algt); 205 208 } 206 209 207 210 static int cryptd_init_instance(struct crypto_instance *inst, ··· 367 364 368 365 static int cryptd_create_skcipher(struct crypto_template *tmpl, 369 366 struct rtattr **tb, 367 + struct crypto_attr_type *algt, 370 368 struct cryptd_queue *queue) 371 369 { 372 370 struct skcipherd_instance_ctx *ctx; ··· 377 373 u32 mask; 378 374 int err; 379 375 380 - type = 0; 381 - mask = CRYPTO_ALG_ASYNC; 382 - 383 - cryptd_check_internal(tb, &type, &mask); 376 + cryptd_type_and_mask(algt, &type, &mask); 384 377 385 378 inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL); 386 379 if (!inst) ··· 396 395 if (err) 397 396 goto err_free_inst; 398 397 399 - inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC | 400 - (alg->base.cra_flags & CRYPTO_ALG_INTERNAL); 401 - 398 + inst->alg.base.cra_flags |= CRYPTO_ALG_ASYNC | 399 + (alg->base.cra_flags & CRYPTO_ALG_INTERNAL); 402 400 inst->alg.ivsize = crypto_skcipher_alg_ivsize(alg); 403 401 inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg); 404 402 inst->alg.min_keysize = crypto_skcipher_alg_min_keysize(alg); ··· 633 633 } 634 
634 635 635 static int cryptd_create_hash(struct crypto_template *tmpl, struct rtattr **tb, 636 + struct crypto_attr_type *algt, 636 637 struct cryptd_queue *queue) 637 638 { 638 639 struct hashd_instance_ctx *ctx; 639 640 struct ahash_instance *inst; 640 641 struct shash_alg *alg; 641 - u32 type = 0; 642 - u32 mask = 0; 642 + u32 type; 643 + u32 mask; 643 644 int err; 644 645 645 - cryptd_check_internal(tb, &type, &mask); 646 + cryptd_type_and_mask(algt, &type, &mask); 646 647 647 648 inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL); 648 649 if (!inst) ··· 662 661 if (err) 663 662 goto err_free_inst; 664 663 665 - inst->alg.halg.base.cra_flags = CRYPTO_ALG_ASYNC | 666 - (alg->base.cra_flags & (CRYPTO_ALG_INTERNAL | 664 + inst->alg.halg.base.cra_flags |= CRYPTO_ALG_ASYNC | 665 + (alg->base.cra_flags & (CRYPTO_ALG_INTERNAL| 667 666 CRYPTO_ALG_OPTIONAL_KEY)); 668 - 669 667 inst->alg.halg.digestsize = alg->digestsize; 670 668 inst->alg.halg.statesize = alg->statesize; 671 669 inst->alg.halg.base.cra_ctxsize = sizeof(struct cryptd_hash_ctx); ··· 820 820 821 821 static int cryptd_create_aead(struct crypto_template *tmpl, 822 822 struct rtattr **tb, 823 + struct crypto_attr_type *algt, 823 824 struct cryptd_queue *queue) 824 825 { 825 826 struct aead_instance_ctx *ctx; 826 827 struct aead_instance *inst; 827 828 struct aead_alg *alg; 828 - u32 type = 0; 829 - u32 mask = CRYPTO_ALG_ASYNC; 829 + u32 type; 830 + u32 mask; 830 831 int err; 831 832 832 - cryptd_check_internal(tb, &type, &mask); 833 + cryptd_type_and_mask(algt, &type, &mask); 833 834 834 835 inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL); 835 836 if (!inst) ··· 849 848 if (err) 850 849 goto err_free_inst; 851 850 852 - inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC | 853 - (alg->base.cra_flags & CRYPTO_ALG_INTERNAL); 851 + inst->alg.base.cra_flags |= CRYPTO_ALG_ASYNC | 852 + (alg->base.cra_flags & CRYPTO_ALG_INTERNAL); 854 853 inst->alg.base.cra_ctxsize = sizeof(struct cryptd_aead_ctx); 855 854 
856 855 inst->alg.ivsize = crypto_aead_alg_ivsize(alg); ··· 885 884 886 885 switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) { 887 886 case CRYPTO_ALG_TYPE_SKCIPHER: 888 - return cryptd_create_skcipher(tmpl, tb, &queue); 887 + return cryptd_create_skcipher(tmpl, tb, algt, &queue); 889 888 case CRYPTO_ALG_TYPE_HASH: 890 - return cryptd_create_hash(tmpl, tb, &queue); 889 + return cryptd_create_hash(tmpl, tb, algt, &queue); 891 890 case CRYPTO_ALG_TYPE_AEAD: 892 - return cryptd_create_aead(tmpl, tb, &queue); 891 + return cryptd_create_aead(tmpl, tb, algt, &queue); 893 892 } 894 893 895 894 return -EINVAL;
+5 -14
crypto/ctr.c
··· 256 256 static int crypto_rfc3686_create(struct crypto_template *tmpl, 257 257 struct rtattr **tb) 258 258 { 259 - struct crypto_attr_type *algt; 260 259 struct skcipher_instance *inst; 261 260 struct skcipher_alg *alg; 262 261 struct crypto_skcipher_spawn *spawn; 263 262 u32 mask; 264 - 265 263 int err; 266 264 267 - algt = crypto_get_attr_type(tb); 268 - if (IS_ERR(algt)) 269 - return PTR_ERR(algt); 270 - 271 - if ((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask) 272 - return -EINVAL; 265 + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask); 266 + if (err) 267 + return err; 268 + mask |= crypto_requires_off(crypto_get_attr_type(tb), 269 + CRYPTO_ALG_NEED_FALLBACK); 273 270 274 271 inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL); 275 272 if (!inst) 276 273 return -ENOMEM; 277 - 278 - mask = crypto_requires_sync(algt->type, algt->mask) | 279 - crypto_requires_off(algt->type, algt->mask, 280 - CRYPTO_ALG_NEED_FALLBACK); 281 274 282 275 spawn = skcipher_instance_ctx(inst); 283 276 ··· 302 309 inst->alg.base.cra_priority = alg->base.cra_priority; 303 310 inst->alg.base.cra_blocksize = 1; 304 311 inst->alg.base.cra_alignmask = alg->base.cra_alignmask; 305 - 306 - inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC; 307 312 308 313 inst->alg.ivsize = CTR_RFC3686_IV_SIZE; 309 314 inst->alg.chunksize = crypto_skcipher_alg_chunksize(alg);
+3 -10
crypto/cts.c
··· 325 325 { 326 326 struct crypto_skcipher_spawn *spawn; 327 327 struct skcipher_instance *inst; 328 - struct crypto_attr_type *algt; 329 328 struct skcipher_alg *alg; 330 329 u32 mask; 331 330 int err; 332 331 333 - algt = crypto_get_attr_type(tb); 334 - if (IS_ERR(algt)) 335 - return PTR_ERR(algt); 336 - 337 - if ((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask) 338 - return -EINVAL; 339 - 340 - mask = crypto_requires_sync(algt->type, algt->mask); 332 + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask); 333 + if (err) 334 + return err; 341 335 342 336 inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL); 343 337 if (!inst) ··· 358 364 if (err) 359 365 goto err_free_inst; 360 366 361 - inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC; 362 367 inst->alg.base.cra_priority = alg->base.cra_priority; 363 368 inst->alg.base.cra_blocksize = alg->base.cra_blocksize; 364 369 inst->alg.base.cra_alignmask = alg->base.cra_alignmask;
+8 -3
crypto/essiv.c
··· 466 466 return PTR_ERR(shash_name); 467 467 468 468 type = algt->type & algt->mask; 469 - mask = crypto_requires_sync(algt->type, algt->mask); 469 + mask = crypto_algt_inherited_mask(algt); 470 470 471 471 switch (type) { 472 472 case CRYPTO_ALG_TYPE_SKCIPHER: ··· 525 525 /* Synchronous hash, e.g., "sha256" */ 526 526 _hash_alg = crypto_alg_mod_lookup(shash_name, 527 527 CRYPTO_ALG_TYPE_SHASH, 528 - CRYPTO_ALG_TYPE_MASK); 528 + CRYPTO_ALG_TYPE_MASK | mask); 529 529 if (IS_ERR(_hash_alg)) { 530 530 err = PTR_ERR(_hash_alg); 531 531 goto out_drop_skcipher; ··· 557 557 hash_alg->base.cra_driver_name) >= CRYPTO_MAX_ALG_NAME) 558 558 goto out_free_hash; 559 559 560 - base->cra_flags = block_base->cra_flags & CRYPTO_ALG_ASYNC; 560 + /* 561 + * hash_alg wasn't gotten via crypto_grab*(), so we need to inherit its 562 + * flags manually. 563 + */ 564 + base->cra_flags |= (hash_alg->base.cra_flags & 565 + CRYPTO_ALG_INHERITED_FLAGS); 561 566 base->cra_blocksize = block_base->cra_blocksize; 562 567 base->cra_ctxsize = sizeof(struct essiv_tfm_ctx); 563 568 base->cra_alignmask = block_base->cra_alignmask;
+9 -31
crypto/gcm.c
··· 578 578 const char *ctr_name, 579 579 const char *ghash_name) 580 580 { 581 - struct crypto_attr_type *algt; 582 581 u32 mask; 583 582 struct aead_instance *inst; 584 583 struct gcm_instance_ctx *ctx; ··· 585 586 struct hash_alg_common *ghash; 586 587 int err; 587 588 588 - algt = crypto_get_attr_type(tb); 589 - if (IS_ERR(algt)) 590 - return PTR_ERR(algt); 591 - 592 - if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask) 593 - return -EINVAL; 594 - 595 - mask = crypto_requires_sync(algt->type, algt->mask); 589 + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_AEAD, &mask); 590 + if (err) 591 + return err; 596 592 597 593 inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL); 598 594 if (!inst) ··· 629 635 CRYPTO_MAX_ALG_NAME) 630 636 goto err_free_inst; 631 637 632 - inst->alg.base.cra_flags = (ghash->base.cra_flags | 633 - ctr->base.cra_flags) & CRYPTO_ALG_ASYNC; 634 638 inst->alg.base.cra_priority = (ghash->base.cra_priority + 635 639 ctr->base.cra_priority) / 2; 636 640 inst->alg.base.cra_blocksize = 1; ··· 827 835 static int crypto_rfc4106_create(struct crypto_template *tmpl, 828 836 struct rtattr **tb) 829 837 { 830 - struct crypto_attr_type *algt; 831 838 u32 mask; 832 839 struct aead_instance *inst; 833 840 struct crypto_aead_spawn *spawn; 834 841 struct aead_alg *alg; 835 842 int err; 836 843 837 - algt = crypto_get_attr_type(tb); 838 - if (IS_ERR(algt)) 839 - return PTR_ERR(algt); 840 - 841 - if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask) 842 - return -EINVAL; 843 - 844 - mask = crypto_requires_sync(algt->type, algt->mask); 844 + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_AEAD, &mask); 845 + if (err) 846 + return err; 845 847 846 848 inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL); 847 849 if (!inst) ··· 868 882 CRYPTO_MAX_ALG_NAME) 869 883 goto err_free_inst; 870 884 871 - inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC; 872 885 inst->alg.base.cra_priority = alg->base.cra_priority; 873 886 
inst->alg.base.cra_blocksize = 1; 874 887 inst->alg.base.cra_alignmask = alg->base.cra_alignmask; ··· 1042 1057 static int crypto_rfc4543_create(struct crypto_template *tmpl, 1043 1058 struct rtattr **tb) 1044 1059 { 1045 - struct crypto_attr_type *algt; 1046 1060 u32 mask; 1047 1061 struct aead_instance *inst; 1048 1062 struct aead_alg *alg; 1049 1063 struct crypto_rfc4543_instance_ctx *ctx; 1050 1064 int err; 1051 1065 1052 - algt = crypto_get_attr_type(tb); 1053 - if (IS_ERR(algt)) 1054 - return PTR_ERR(algt); 1055 - 1056 - if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask) 1057 - return -EINVAL; 1058 - 1059 - mask = crypto_requires_sync(algt->type, algt->mask); 1066 + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_AEAD, &mask); 1067 + if (err) 1068 + return err; 1060 1069 1061 1070 inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL); 1062 1071 if (!inst) ··· 1083 1104 CRYPTO_MAX_ALG_NAME) 1084 1105 goto err_free_inst; 1085 1106 1086 - inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC; 1087 1107 inst->alg.base.cra_priority = alg->base.cra_priority; 1088 1108 inst->alg.base.cra_blocksize = 1; 1089 1109 inst->alg.base.cra_alignmask = alg->base.cra_alignmask;
+3 -11
crypto/geniv.c
··· 42 42 struct rtattr **tb) 43 43 { 44 44 struct crypto_aead_spawn *spawn; 45 - struct crypto_attr_type *algt; 46 45 struct aead_instance *inst; 47 46 struct aead_alg *alg; 48 47 unsigned int ivsize; ··· 49 50 u32 mask; 50 51 int err; 51 52 52 - algt = crypto_get_attr_type(tb); 53 - if (IS_ERR(algt)) 54 - return ERR_CAST(algt); 55 - 56 - if ((algt->type ^ CRYPTO_ALG_TYPE_AEAD) & algt->mask) 57 - return ERR_PTR(-EINVAL); 53 + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_AEAD, &mask); 54 + if (err) 55 + return ERR_PTR(err); 58 56 59 57 inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL); 60 58 if (!inst) 61 59 return ERR_PTR(-ENOMEM); 62 60 63 61 spawn = aead_instance_ctx(inst); 64 - 65 - /* Ignore async algorithms if necessary. */ 66 - mask = crypto_requires_sync(algt->type, algt->mask); 67 62 68 63 err = crypto_grab_aead(spawn, aead_crypto_instance(inst), 69 64 crypto_attr_alg_name(tb[1]), 0, mask); ··· 83 90 CRYPTO_MAX_ALG_NAME) 84 91 goto err_free_inst; 85 92 86 - inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC; 87 93 inst->alg.base.cra_priority = alg->base.cra_priority; 88 94 inst->alg.base.cra_blocksize = alg->base.cra_blocksize; 89 95 inst->alg.base.cra_alignmask = alg->base.cra_alignmask;
+3 -2
crypto/hmac.c
··· 168 168 struct crypto_shash_spawn *spawn; 169 169 struct crypto_alg *alg; 170 170 struct shash_alg *salg; 171 + u32 mask; 171 172 int err; 172 173 int ds; 173 174 int ss; 174 175 175 - err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH); 176 + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH, &mask); 176 177 if (err) 177 178 return err; 178 179 ··· 183 182 spawn = shash_instance_ctx(inst); 184 183 185 184 err = crypto_grab_shash(spawn, shash_crypto_instance(inst), 186 - crypto_attr_alg_name(tb[1]), 0, 0); 185 + crypto_attr_alg_name(tb[1]), 0, mask); 187 186 if (err) 188 187 goto err_free_inst; 189 188 salg = crypto_spawn_shash_alg(spawn);
+3 -10
crypto/lrw.c
··· 297 297 { 298 298 struct crypto_skcipher_spawn *spawn; 299 299 struct skcipher_instance *inst; 300 - struct crypto_attr_type *algt; 301 300 struct skcipher_alg *alg; 302 301 const char *cipher_name; 303 302 char ecb_name[CRYPTO_MAX_ALG_NAME]; 304 303 u32 mask; 305 304 int err; 306 305 307 - algt = crypto_get_attr_type(tb); 308 - if (IS_ERR(algt)) 309 - return PTR_ERR(algt); 310 - 311 - if ((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask) 312 - return -EINVAL; 313 - 314 - mask = crypto_requires_sync(algt->type, algt->mask); 306 + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask); 307 + if (err) 308 + return err; 315 309 316 310 cipher_name = crypto_attr_alg_name(tb[1]); 317 311 if (IS_ERR(cipher_name)) ··· 373 379 } else 374 380 goto err_free_inst; 375 381 376 - inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC; 377 382 inst->alg.base.cra_priority = alg->base.cra_priority; 378 383 inst->alg.base.cra_blocksize = LRW_BLOCK_SIZE; 379 384 inst->alg.base.cra_alignmask = alg->base.cra_alignmask |
+5 -9
crypto/pcrypt.c
··· 226 226 } 227 227 228 228 static int pcrypt_create_aead(struct crypto_template *tmpl, struct rtattr **tb, 229 - u32 type, u32 mask) 229 + struct crypto_attr_type *algt) 230 230 { 231 231 struct pcrypt_instance_ctx *ctx; 232 - struct crypto_attr_type *algt; 233 232 struct aead_instance *inst; 234 233 struct aead_alg *alg; 234 + u32 mask = crypto_algt_inherited_mask(algt); 235 235 int err; 236 - 237 - algt = crypto_get_attr_type(tb); 238 - if (IS_ERR(algt)) 239 - return PTR_ERR(algt); 240 236 241 237 inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL); 242 238 if (!inst) ··· 250 254 goto err_free_inst; 251 255 252 256 err = crypto_grab_aead(&ctx->spawn, aead_crypto_instance(inst), 253 - crypto_attr_alg_name(tb[1]), 0, 0); 257 + crypto_attr_alg_name(tb[1]), 0, mask); 254 258 if (err) 255 259 goto err_free_inst; 256 260 ··· 259 263 if (err) 260 264 goto err_free_inst; 261 265 262 - inst->alg.base.cra_flags = CRYPTO_ALG_ASYNC; 266 + inst->alg.base.cra_flags |= CRYPTO_ALG_ASYNC; 263 267 264 268 inst->alg.ivsize = crypto_aead_alg_ivsize(alg); 265 269 inst->alg.maxauthsize = crypto_aead_alg_maxauthsize(alg); ··· 294 298 295 299 switch (algt->type & algt->mask & CRYPTO_ALG_TYPE_MASK) { 296 300 case CRYPTO_ALG_TYPE_AEAD: 297 - return pcrypt_create_aead(tmpl, tb, algt->type, algt->mask); 301 + return pcrypt_create_aead(tmpl, tb, algt); 298 302 } 299 303 300 304 return -EINVAL;
+3 -10
crypto/rsa-pkcs1pad.c
··· 596 596 597 597 static int pkcs1pad_create(struct crypto_template *tmpl, struct rtattr **tb) 598 598 { 599 - struct crypto_attr_type *algt; 600 599 u32 mask; 601 600 struct akcipher_instance *inst; 602 601 struct pkcs1pad_inst_ctx *ctx; ··· 603 604 const char *hash_name; 604 605 int err; 605 606 606 - algt = crypto_get_attr_type(tb); 607 - if (IS_ERR(algt)) 608 - return PTR_ERR(algt); 609 - 610 - if ((algt->type ^ CRYPTO_ALG_TYPE_AKCIPHER) & algt->mask) 611 - return -EINVAL; 612 - 613 - mask = crypto_requires_sync(algt->type, algt->mask); 607 + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_AKCIPHER, &mask); 608 + if (err) 609 + return err; 614 610 615 611 inst = kzalloc(sizeof(*inst) + sizeof(*ctx), GFP_KERNEL); 616 612 if (!inst) ··· 652 658 goto err_free_inst; 653 659 } 654 660 655 - inst->alg.base.cra_flags = rsa_alg->base.cra_flags & CRYPTO_ALG_ASYNC; 656 661 inst->alg.base.cra_priority = rsa_alg->base.cra_priority; 657 662 inst->alg.base.cra_ctxsize = sizeof(struct pkcs1pad_ctx); 658 663
+4 -2
crypto/simd.c
··· 171 171 drvname) >= CRYPTO_MAX_ALG_NAME) 172 172 goto out_free_salg; 173 173 174 - alg->base.cra_flags = CRYPTO_ALG_ASYNC; 174 + alg->base.cra_flags = CRYPTO_ALG_ASYNC | 175 + (ialg->base.cra_flags & CRYPTO_ALG_INHERITED_FLAGS); 175 176 alg->base.cra_priority = ialg->base.cra_priority; 176 177 alg->base.cra_blocksize = ialg->base.cra_blocksize; 177 178 alg->base.cra_alignmask = ialg->base.cra_alignmask; ··· 418 417 drvname) >= CRYPTO_MAX_ALG_NAME) 419 418 goto out_free_salg; 420 419 421 - alg->base.cra_flags = CRYPTO_ALG_ASYNC; 420 + alg->base.cra_flags = CRYPTO_ALG_ASYNC | 421 + (ialg->base.cra_flags & CRYPTO_ALG_INHERITED_FLAGS); 422 422 alg->base.cra_priority = ialg->base.cra_priority; 423 423 alg->base.cra_blocksize = ialg->base.cra_blocksize; 424 424 alg->base.cra_alignmask = ialg->base.cra_alignmask;
+5 -10
crypto/skcipher.c
··· 934 934 struct skcipher_instance *skcipher_alloc_instance_simple( 935 935 struct crypto_template *tmpl, struct rtattr **tb) 936 936 { 937 - struct crypto_attr_type *algt; 938 937 u32 mask; 939 938 struct skcipher_instance *inst; 940 939 struct crypto_cipher_spawn *spawn; 941 940 struct crypto_alg *cipher_alg; 942 941 int err; 943 942 944 - algt = crypto_get_attr_type(tb); 945 - if (IS_ERR(algt)) 946 - return ERR_CAST(algt); 947 - 948 - if ((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask) 949 - return ERR_PTR(-EINVAL); 950 - 951 - mask = crypto_requires_off(algt->type, algt->mask, 952 - CRYPTO_ALG_NEED_FALLBACK); 943 + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask); 944 + if (err) 945 + return ERR_PTR(err); 946 + mask |= crypto_requires_off(crypto_get_attr_type(tb), 947 + CRYPTO_ALG_NEED_FALLBACK); 953 948 954 949 inst = kzalloc(sizeof(*inst) + sizeof(*spawn), GFP_KERNEL); 955 950 if (!inst)
+3 -2
crypto/vmac.c
··· 620 620 struct shash_instance *inst; 621 621 struct crypto_cipher_spawn *spawn; 622 622 struct crypto_alg *alg; 623 + u32 mask; 623 624 int err; 624 625 625 - err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH); 626 + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH, &mask); 626 627 if (err) 627 628 return err; 628 629 ··· 633 632 spawn = shash_instance_ctx(inst); 634 633 635 634 err = crypto_grab_cipher(spawn, shash_crypto_instance(inst), 636 - crypto_attr_alg_name(tb[1]), 0, 0); 635 + crypto_attr_alg_name(tb[1]), 0, mask); 637 636 if (err) 638 637 goto err_free_inst; 639 638 alg = crypto_spawn_cipher_alg(spawn);
+3 -2
crypto/xcbc.c
··· 191 191 struct crypto_cipher_spawn *spawn; 192 192 struct crypto_alg *alg; 193 193 unsigned long alignmask; 194 + u32 mask; 194 195 int err; 195 196 196 - err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH); 197 + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SHASH, &mask); 197 198 if (err) 198 199 return err; 199 200 ··· 204 203 spawn = shash_instance_ctx(inst); 205 204 206 205 err = crypto_grab_cipher(spawn, shash_crypto_instance(inst), 207 - crypto_attr_alg_name(tb[1]), 0, 0); 206 + crypto_attr_alg_name(tb[1]), 0, mask); 208 207 if (err) 209 208 goto err_free_inst; 210 209 alg = crypto_spawn_cipher_alg(spawn);
+5 -12
crypto/xts.c
··· 331 331 static int create(struct crypto_template *tmpl, struct rtattr **tb) 332 332 { 333 333 struct skcipher_instance *inst; 334 - struct crypto_attr_type *algt; 335 334 struct xts_instance_ctx *ctx; 336 335 struct skcipher_alg *alg; 337 336 const char *cipher_name; 338 337 u32 mask; 339 338 int err; 340 339 341 - algt = crypto_get_attr_type(tb); 342 - if (IS_ERR(algt)) 343 - return PTR_ERR(algt); 344 - 345 - if ((algt->type ^ CRYPTO_ALG_TYPE_SKCIPHER) & algt->mask) 346 - return -EINVAL; 340 + err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask); 341 + if (err) 342 + return err; 343 + mask |= crypto_requires_off(crypto_get_attr_type(tb), 344 + CRYPTO_ALG_NEED_FALLBACK); 347 345 348 346 cipher_name = crypto_attr_alg_name(tb[1]); 349 347 if (IS_ERR(cipher_name)) ··· 352 354 return -ENOMEM; 353 355 354 356 ctx = skcipher_instance_ctx(inst); 355 - 356 - mask = crypto_requires_off(algt->type, algt->mask, 357 - CRYPTO_ALG_NEED_FALLBACK | 358 - CRYPTO_ALG_ASYNC); 359 357 360 358 err = crypto_grab_skcipher(&ctx->spawn, skcipher_crypto_instance(inst), 361 359 cipher_name, 0, mask); ··· 409 415 } else 410 416 goto err_free_inst; 411 417 412 - inst->alg.base.cra_flags = alg->base.cra_flags & CRYPTO_ALG_ASYNC; 413 418 inst->alg.base.cra_priority = alg->base.cra_priority; 414 419 inst->alg.base.cra_blocksize = XTS_BLOCK_SIZE; 415 420 inst->alg.base.cra_alignmask = alg->base.cra_alignmask |
+16 -7
include/crypto/algapi.h
··· 116 116 void *crypto_spawn_tfm2(struct crypto_spawn *spawn); 117 117 118 118 struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb); 119 - int crypto_check_attr_type(struct rtattr **tb, u32 type); 119 + int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret); 120 120 const char *crypto_attr_alg_name(struct rtattr *rta); 121 121 int crypto_attr_u32(struct rtattr *rta, u32 *num); 122 122 int crypto_inst_setname(struct crypto_instance *inst, const char *name, ··· 235 235 container_of(queue->backlog, struct crypto_async_request, list); 236 236 } 237 237 238 - static inline int crypto_requires_off(u32 type, u32 mask, u32 off) 238 + static inline u32 crypto_requires_off(struct crypto_attr_type *algt, u32 off) 239 239 { 240 - return (type ^ off) & mask & off; 240 + return (algt->type ^ off) & algt->mask & off; 241 241 } 242 242 243 243 /* 244 - * Returns CRYPTO_ALG_ASYNC if type/mask requires the use of sync algorithms. 245 - * Otherwise returns zero. 244 + * When an algorithm uses another algorithm (e.g., if it's an instance of a 245 + * template), these are the flags that should always be set on the "outer" 246 + * algorithm if any "inner" algorithm has them set. 246 247 */ 247 - static inline int crypto_requires_sync(u32 type, u32 mask) 248 + #define CRYPTO_ALG_INHERITED_FLAGS CRYPTO_ALG_ASYNC 249 + 250 + /* 251 + * Given the type and mask that specify the flags restrictions on a template 252 + * instance being created, return the mask that should be passed to 253 + * crypto_grab_*() (along with type=0) to honor any request the user made to 254 + * have any of the CRYPTO_ALG_INHERITED_FLAGS clear. 255 + */ 256 + static inline u32 crypto_algt_inherited_mask(struct crypto_attr_type *algt) 248 257 { 249 - return crypto_requires_off(type, mask, CRYPTO_ALG_ASYNC); 258 + return crypto_requires_off(algt, CRYPTO_ALG_INHERITED_FLAGS); 250 259 } 251 260 252 261 noinline unsigned long __crypto_memneq(const void *a, const void *b, size_t size);