Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: crypto4xx - Remove ahash-related code

The hash implementation in crypto4xx has been disabled since 2009.
As nobody has tried to fix this, remove all the dead code.

Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

Herbert Xu 97855e7f 7916eddc

+1 -155
-106
drivers/crypto/amcc/crypto4xx_alg.c
··· 12 12 #include <linux/interrupt.h> 13 13 #include <linux/spinlock_types.h> 14 14 #include <linux/scatterlist.h> 15 - #include <linux/crypto.h> 16 - #include <linux/hash.h> 17 - #include <crypto/internal/hash.h> 18 15 #include <linux/dma-mapping.h> 19 16 #include <crypto/algapi.h> 20 17 #include <crypto/aead.h> ··· 598 601 int crypto4xx_decrypt_aes_gcm(struct aead_request *req) 599 602 { 600 603 return crypto4xx_crypt_aes_gcm(req, true); 601 - } 602 - 603 - /* 604 - * HASH SHA1 Functions 605 - */ 606 - static int crypto4xx_hash_alg_init(struct crypto_tfm *tfm, 607 - unsigned int sa_len, 608 - unsigned char ha, 609 - unsigned char hm) 610 - { 611 - struct crypto_alg *alg = tfm->__crt_alg; 612 - struct crypto4xx_alg *my_alg; 613 - struct crypto4xx_ctx *ctx = crypto_tfm_ctx(tfm); 614 - struct dynamic_sa_hash160 *sa; 615 - int rc; 616 - 617 - my_alg = container_of(__crypto_ahash_alg(alg), struct crypto4xx_alg, 618 - alg.u.hash); 619 - ctx->dev = my_alg->dev; 620 - 621 - /* Create SA */ 622 - if (ctx->sa_in || ctx->sa_out) 623 - crypto4xx_free_sa(ctx); 624 - 625 - rc = crypto4xx_alloc_sa(ctx, sa_len); 626 - if (rc) 627 - return rc; 628 - 629 - crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm), 630 - sizeof(struct crypto4xx_ctx)); 631 - sa = (struct dynamic_sa_hash160 *)ctx->sa_in; 632 - set_dynamic_sa_command_0(&sa->ctrl, SA_SAVE_HASH, SA_NOT_SAVE_IV, 633 - SA_NOT_LOAD_HASH, SA_LOAD_IV_FROM_SA, 634 - SA_NO_HEADER_PROC, ha, SA_CIPHER_ALG_NULL, 635 - SA_PAD_TYPE_ZERO, SA_OP_GROUP_BASIC, 636 - SA_OPCODE_HASH, DIR_INBOUND); 637 - set_dynamic_sa_command_1(&sa->ctrl, 0, SA_HASH_MODE_HASH, 638 - CRYPTO_FEEDBACK_MODE_NO_FB, SA_EXTENDED_SN_OFF, 639 - SA_SEQ_MASK_OFF, SA_MC_ENABLE, 640 - SA_NOT_COPY_PAD, SA_NOT_COPY_PAYLOAD, 641 - SA_NOT_COPY_HDR); 642 - /* Need to zero hash digest in SA */ 643 - memset(sa->inner_digest, 0, sizeof(sa->inner_digest)); 644 - memset(sa->outer_digest, 0, sizeof(sa->outer_digest)); 645 - 646 - return 0; 647 - } 648 - 649 - int 
crypto4xx_hash_init(struct ahash_request *req) 650 - { 651 - struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm); 652 - int ds; 653 - struct dynamic_sa_ctl *sa; 654 - 655 - sa = ctx->sa_in; 656 - ds = crypto_ahash_digestsize( 657 - __crypto_ahash_cast(req->base.tfm)); 658 - sa->sa_command_0.bf.digest_len = ds >> 2; 659 - sa->sa_command_0.bf.load_hash_state = SA_LOAD_HASH_FROM_SA; 660 - 661 - return 0; 662 - } 663 - 664 - int crypto4xx_hash_update(struct ahash_request *req) 665 - { 666 - struct crypto_ahash *ahash = crypto_ahash_reqtfm(req); 667 - struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm); 668 - struct scatterlist dst; 669 - unsigned int ds = crypto_ahash_digestsize(ahash); 670 - 671 - sg_init_one(&dst, req->result, ds); 672 - 673 - return crypto4xx_build_pd(&req->base, ctx, req->src, &dst, 674 - req->nbytes, NULL, 0, ctx->sa_in, 675 - ctx->sa_len, 0, NULL); 676 - } 677 - 678 - int crypto4xx_hash_final(struct ahash_request *req) 679 - { 680 - return 0; 681 - } 682 - 683 - int crypto4xx_hash_digest(struct ahash_request *req) 684 - { 685 - struct crypto_ahash *ahash = crypto_ahash_reqtfm(req); 686 - struct crypto4xx_ctx *ctx = crypto_tfm_ctx(req->base.tfm); 687 - struct scatterlist dst; 688 - unsigned int ds = crypto_ahash_digestsize(ahash); 689 - 690 - sg_init_one(&dst, req->result, ds); 691 - 692 - return crypto4xx_build_pd(&req->base, ctx, req->src, &dst, 693 - req->nbytes, NULL, 0, ctx->sa_in, 694 - ctx->sa_len, 0, NULL); 695 - } 696 - 697 - /* 698 - * SHA1 Algorithm 699 - */ 700 - int crypto4xx_sha1_alg_init(struct crypto_tfm *tfm) 701 - { 702 - return crypto4xx_hash_alg_init(tfm, SA_HASH160_LEN, SA_HASH_ALG_SHA1, 703 - SA_HASH_MODE_HASH); 704 604 }
+1 -42
drivers/crypto/amcc/crypto4xx_core.c
··· 485 485 } 486 486 } 487 487 488 - static void crypto4xx_copy_digest_to_dst(void *dst, 489 - struct pd_uinfo *pd_uinfo, 490 - struct crypto4xx_ctx *ctx) 491 - { 492 - struct dynamic_sa_ctl *sa = (struct dynamic_sa_ctl *) ctx->sa_in; 493 - 494 - if (sa->sa_command_0.bf.hash_alg == SA_HASH_ALG_SHA1) { 495 - memcpy(dst, pd_uinfo->sr_va->save_digest, 496 - SA_HASH_ALG_SHA1_DIGEST_SIZE); 497 - } 498 - } 499 - 500 488 static void crypto4xx_ret_sg_desc(struct crypto4xx_device *dev, 501 489 struct pd_uinfo *pd_uinfo) 502 490 { ··· 535 547 if (pd_uinfo->state & PD_ENTRY_BUSY) 536 548 skcipher_request_complete(req, -EINPROGRESS); 537 549 skcipher_request_complete(req, 0); 538 - } 539 - 540 - static void crypto4xx_ahash_done(struct crypto4xx_device *dev, 541 - struct pd_uinfo *pd_uinfo) 542 - { 543 - struct crypto4xx_ctx *ctx; 544 - struct ahash_request *ahash_req; 545 - 546 - ahash_req = ahash_request_cast(pd_uinfo->async_req); 547 - ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(ahash_req)); 548 - 549 - crypto4xx_copy_digest_to_dst(ahash_req->result, pd_uinfo, ctx); 550 - crypto4xx_ret_sg_desc(dev, pd_uinfo); 551 - 552 - if (pd_uinfo->state & PD_ENTRY_BUSY) 553 - ahash_request_complete(ahash_req, -EINPROGRESS); 554 - ahash_request_complete(ahash_req, 0); 555 550 } 556 551 557 552 static void crypto4xx_aead_done(struct crypto4xx_device *dev, ··· 612 641 break; 613 642 case CRYPTO_ALG_TYPE_AEAD: 614 643 crypto4xx_aead_done(dev, pd_uinfo, pd); 615 - break; 616 - case CRYPTO_ALG_TYPE_AHASH: 617 - crypto4xx_ahash_done(dev, pd_uinfo); 618 644 break; 619 645 } 620 646 } ··· 880 912 } 881 913 882 914 pd->pd_ctl.w = PD_CTL_HOST_READY | 883 - ((crypto_tfm_alg_type(req->tfm) == CRYPTO_ALG_TYPE_AHASH) || 884 - (crypto_tfm_alg_type(req->tfm) == CRYPTO_ALG_TYPE_AEAD) ? 915 + ((crypto_tfm_alg_type(req->tfm) == CRYPTO_ALG_TYPE_AEAD) ? 885 916 PD_CTL_HASH_FINAL : 0); 886 917 pd->pd_ctl_len.w = 0x00400000 | (assoclen + datalen); 887 918 pd_uinfo->state = PD_ENTRY_INUSE | (is_busy ? 
PD_ENTRY_BUSY : 0); ··· 986 1019 rc = crypto_register_aead(&alg->alg.u.aead); 987 1020 break; 988 1021 989 - case CRYPTO_ALG_TYPE_AHASH: 990 - rc = crypto_register_ahash(&alg->alg.u.hash); 991 - break; 992 - 993 1022 case CRYPTO_ALG_TYPE_RNG: 994 1023 rc = crypto_register_rng(&alg->alg.u.rng); 995 1024 break; ··· 1011 1048 list_for_each_entry_safe(alg, tmp, &sec_dev->alg_list, entry) { 1012 1049 list_del(&alg->entry); 1013 1050 switch (alg->alg.type) { 1014 - case CRYPTO_ALG_TYPE_AHASH: 1015 - crypto_unregister_ahash(&alg->alg.u.hash); 1016 - break; 1017 - 1018 1051 case CRYPTO_ALG_TYPE_AEAD: 1019 1052 crypto_unregister_aead(&alg->alg.u.aead); 1020 1053 break;
-7
drivers/crypto/amcc/crypto4xx_core.h
··· 16 16 #include <linux/ratelimit.h> 17 17 #include <linux/mutex.h> 18 18 #include <linux/scatterlist.h> 19 - #include <crypto/internal/hash.h> 20 19 #include <crypto/internal/aead.h> 21 20 #include <crypto/internal/rng.h> 22 21 #include <crypto/internal/skcipher.h> ··· 134 135 u32 type; 135 136 union { 136 137 struct skcipher_alg cipher; 137 - struct ahash_alg hash; 138 138 struct aead_alg aead; 139 139 struct rng_alg rng; 140 140 } u; ··· 181 183 int crypto4xx_decrypt_noiv_block(struct skcipher_request *req); 182 184 int crypto4xx_rfc3686_encrypt(struct skcipher_request *req); 183 185 int crypto4xx_rfc3686_decrypt(struct skcipher_request *req); 184 - int crypto4xx_sha1_alg_init(struct crypto_tfm *tfm); 185 - int crypto4xx_hash_digest(struct ahash_request *req); 186 - int crypto4xx_hash_final(struct ahash_request *req); 187 - int crypto4xx_hash_update(struct ahash_request *req); 188 - int crypto4xx_hash_init(struct ahash_request *req); 189 186 190 187 /* 191 188 * Note: Only use this function to copy items that is word aligned.