Linux kernel mirror (for testing) git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git
kernel os linux

crypto: nx - convert AES-ECB to skcipher API

Convert the PowerPC Nest (NX) implementation of AES-ECB from the
deprecated "blkcipher" API to the "skcipher" API. This is needed in
order for the blkcipher API to be removed.

Signed-off-by: Eric Biggers <ebiggers@google.com>
Reviewed-by: Ard Biesheuvel <ard.biesheuvel@linaro.org>
Signed-off-by: Herbert Xu <herbert@gondor.apana.org.au>

Authored by Eric Biggers and committed by Herbert Xu
bfd9efdd 7740bd51

+58 -51
+32 -44
drivers/crypto/nx/nx-aes-ecb.c
··· 18 18 #include "nx.h" 19 19 20 20 21 - static int ecb_aes_nx_set_key(struct crypto_tfm *tfm, 22 - const u8 *in_key, 23 - unsigned int key_len) 21 + static int ecb_aes_nx_set_key(struct crypto_skcipher *tfm, 22 + const u8 *in_key, 23 + unsigned int key_len) 24 24 { 25 - struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(tfm); 25 + struct nx_crypto_ctx *nx_ctx = crypto_skcipher_ctx(tfm); 26 26 struct nx_csbcpb *csbcpb = (struct nx_csbcpb *)nx_ctx->csbcpb; 27 27 28 28 nx_ctx_init(nx_ctx, HCOP_FC_AES); ··· 50 50 return 0; 51 51 } 52 52 53 - static int ecb_aes_nx_crypt(struct blkcipher_desc *desc, 54 - struct scatterlist *dst, 55 - struct scatterlist *src, 56 - unsigned int nbytes, 57 - int enc) 53 + static int ecb_aes_nx_crypt(struct skcipher_request *req, 54 + int enc) 58 55 { 59 - struct nx_crypto_ctx *nx_ctx = crypto_blkcipher_ctx(desc->tfm); 56 + struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req); 57 + struct nx_crypto_ctx *nx_ctx = crypto_skcipher_ctx(tfm); 60 58 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; 61 59 unsigned long irq_flags; 62 60 unsigned int processed = 0, to_process; ··· 68 70 NX_CPB_FDM(csbcpb) &= ~NX_FDM_ENDE_ENCRYPT; 69 71 70 72 do { 71 - to_process = nbytes - processed; 73 + to_process = req->cryptlen - processed; 72 74 73 - rc = nx_build_sg_lists(nx_ctx, NULL, dst, src, &to_process, 74 - processed, NULL); 75 + rc = nx_build_sg_lists(nx_ctx, NULL, req->dst, req->src, 76 + &to_process, processed, NULL); 75 77 if (rc) 76 78 goto out; 77 79 ··· 81 83 } 82 84 83 85 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, 84 - desc->flags & CRYPTO_TFM_REQ_MAY_SLEEP); 86 + req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP); 85 87 if (rc) 86 88 goto out; 87 89 ··· 90 92 &(nx_ctx->stats->aes_bytes)); 91 93 92 94 processed += to_process; 93 - } while (processed < nbytes); 95 + } while (processed < req->cryptlen); 94 96 95 97 out: 96 98 spin_unlock_irqrestore(&nx_ctx->lock, irq_flags); 97 99 return rc; 98 100 } 99 101 100 - static int ecb_aes_nx_encrypt(struct blkcipher_desc *desc, 101 - struct scatterlist *dst, 102 - struct scatterlist *src, 103 - unsigned int nbytes) 102 + static int ecb_aes_nx_encrypt(struct skcipher_request *req) 104 103 { 105 - return ecb_aes_nx_crypt(desc, dst, src, nbytes, 1); 104 + return ecb_aes_nx_crypt(req, 1); 106 105 } 107 106 108 - static int ecb_aes_nx_decrypt(struct blkcipher_desc *desc, 109 - struct scatterlist *dst, 110 - struct scatterlist *src, 111 - unsigned int nbytes) 107 + static int ecb_aes_nx_decrypt(struct skcipher_request *req) 112 108 { 113 - return ecb_aes_nx_crypt(desc, dst, src, nbytes, 0); 114 - return ecb_aes_nx_crypt(req, 0); 109 + return ecb_aes_nx_crypt(req, 0); 114 110 } 115 111 116 - struct crypto_alg nx_ecb_aes_alg = { 117 - .cra_name = "ecb(aes)", 118 - .cra_driver_name = "ecb-aes-nx", 119 - .cra_priority = 300, 120 - .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER, 121 - .cra_blocksize = AES_BLOCK_SIZE, 122 - .cra_alignmask = 0xf, 123 - .cra_ctxsize = sizeof(struct nx_crypto_ctx), 124 - .cra_type = &crypto_blkcipher_type, 125 - .cra_module = THIS_MODULE, 126 - .cra_init = nx_crypto_ctx_aes_ecb_init, 127 - .cra_exit = nx_crypto_ctx_exit, 128 - .cra_blkcipher = { 129 - .min_keysize = AES_MIN_KEY_SIZE, 130 - .max_keysize = AES_MAX_KEY_SIZE, 131 - .setkey = ecb_aes_nx_set_key, 132 - .encrypt = ecb_aes_nx_encrypt, 133 - .decrypt = ecb_aes_nx_decrypt, 134 - } 112 + struct skcipher_alg nx_ecb_aes_alg = { 113 + .base.cra_name = "ecb(aes)", 114 + .base.cra_driver_name = "ecb-aes-nx", 115 + .base.cra_priority = 300, 116 + .base.cra_blocksize = AES_BLOCK_SIZE, 117 + .base.cra_alignmask = 0xf, 118 + .base.cra_ctxsize = sizeof(struct nx_crypto_ctx), 119 + .base.cra_module = THIS_MODULE, 120 + .init = nx_crypto_ctx_aes_ecb_init, 121 + .exit = nx_crypto_ctx_skcipher_exit, 122 + .min_keysize = AES_MIN_KEY_SIZE, 123 + .max_keysize = AES_MAX_KEY_SIZE, 124 + .setkey = ecb_aes_nx_set_key, 125 + .encrypt = ecb_aes_nx_encrypt, 126 + .decrypt = ecb_aes_nx_decrypt, 135 127 };
+23 -5
drivers/crypto/nx/nx.c
··· 517 517 crypto_register_alg(alg) : 0; 518 518 } 519 519 520 + static int nx_register_skcipher(struct skcipher_alg *alg, u32 fc, u32 mode) 521 + { 522 + return nx_check_props(&nx_driver.viodev->dev, fc, mode) ? 523 + crypto_register_skcipher(alg) : 0; 524 + } 525 + 520 526 static int nx_register_aead(struct aead_alg *alg, u32 fc, u32 mode) 521 527 { 522 528 return nx_check_props(&nx_driver.viodev->dev, fc, mode) ? ··· 541 535 { 542 536 if (nx_check_props(NULL, fc, mode)) 543 537 crypto_unregister_alg(alg); 538 + } 539 + 540 + static void nx_unregister_skcipher(struct skcipher_alg *alg, u32 fc, u32 mode) 541 + { 542 + if (nx_check_props(NULL, fc, mode)) 543 + crypto_unregister_skcipher(alg); 544 544 } 545 545 546 546 static void nx_unregister_aead(struct aead_alg *alg, u32 fc, u32 mode) ··· 585 573 586 574 nx_driver.of.status = NX_OKAY; 587 575 588 - rc = nx_register_alg(&nx_ecb_aes_alg, NX_FC_AES, NX_MODE_AES_ECB); 576 + rc = nx_register_skcipher(&nx_ecb_aes_alg, NX_FC_AES, NX_MODE_AES_ECB); 589 577 if (rc) 590 578 goto out; 591 579 ··· 649 637 out_unreg_cbc: 650 638 nx_unregister_alg(&nx_cbc_aes_alg, NX_FC_AES, NX_MODE_AES_CBC); 651 639 out_unreg_ecb: 652 - nx_unregister_alg(&nx_ecb_aes_alg, NX_FC_AES, NX_MODE_AES_ECB); 640 + nx_unregister_skcipher(&nx_ecb_aes_alg, NX_FC_AES, NX_MODE_AES_ECB); 653 641 out: 654 642 return rc; 655 643 } ··· 728 716 NX_MODE_AES_CBC); 729 717 } 730 718 731 - int nx_crypto_ctx_aes_ecb_init(struct crypto_tfm *tfm) 719 + int nx_crypto_ctx_aes_ecb_init(struct crypto_skcipher *tfm) 732 720 { 733 - return nx_crypto_ctx_init(crypto_tfm_ctx(tfm), NX_FC_AES, 721 + return nx_crypto_ctx_init(crypto_skcipher_ctx(tfm), NX_FC_AES, 734 722 NX_MODE_AES_ECB); 735 723 } 736 724 ··· 762 750 nx_ctx->csbcpb_aead = NULL; 763 751 nx_ctx->in_sg = NULL; 764 752 nx_ctx->out_sg = NULL; 753 + } 754 + 755 + void nx_crypto_ctx_skcipher_exit(struct crypto_skcipher *tfm) 756 + { 757 + nx_crypto_ctx_exit(crypto_skcipher_ctx(tfm)); 765 758 } 766 759 767 760 void nx_crypto_ctx_aead_exit(struct crypto_aead *tfm) ··· 818 801 nx_unregister_alg(&nx_ctr3686_aes_alg, 819 802 NX_FC_AES, NX_MODE_AES_CTR); 820 803 nx_unregister_alg(&nx_cbc_aes_alg, NX_FC_AES, NX_MODE_AES_CBC); 821 - nx_unregister_alg(&nx_ecb_aes_alg, NX_FC_AES, 804 + nx_unregister_skcipher(&nx_ecb_aes_alg, NX_FC_AES, 805 + NX_MODE_AES_ECB); 822 806 } 823 807 824 808 return 0;
+3 -2
drivers/crypto/nx/nx.h
··· 147 147 int nx_crypto_ctx_aes_xcbc_init(struct crypto_tfm *tfm); 148 148 int nx_crypto_ctx_aes_ctr_init(struct crypto_tfm *tfm); 149 149 int nx_crypto_ctx_aes_cbc_init(struct crypto_tfm *tfm); 150 - int nx_crypto_ctx_aes_ecb_init(struct crypto_tfm *tfm); 150 + int nx_crypto_ctx_aes_ecb_init(struct crypto_skcipher *tfm); 151 151 int nx_crypto_ctx_sha_init(struct crypto_tfm *tfm); 152 152 void nx_crypto_ctx_exit(struct crypto_tfm *tfm); 153 + void nx_crypto_ctx_skcipher_exit(struct crypto_skcipher *tfm); 153 154 void nx_crypto_ctx_aead_exit(struct crypto_aead *tfm); 154 155 void nx_ctx_init(struct nx_crypto_ctx *nx_ctx, unsigned int function); 155 156 int nx_hcall_sync(struct nx_crypto_ctx *ctx, struct vio_pfo_op *op, ··· 177 176 #define NX_PAGE_NUM(x) ((u64)(x) & 0xfffffffffffff000ULL) 178 177 179 178 extern struct crypto_alg nx_cbc_aes_alg; 180 - extern struct crypto_alg nx_ecb_aes_alg; 179 + extern struct skcipher_alg nx_ecb_aes_alg; 181 180 extern struct aead_alg nx_gcm_aes_alg; 182 181 extern struct aead_alg nx_gcm4106_aes_alg; 183 182 extern struct crypto_alg nx_ctr3686_aes_alg;